// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/resources/tile_manager.h"

#include "base/bind.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "cc/debug/devtools_instrumentation.h"
#include "cc/debug/traced_value.h"
#include "cc/layers/picture_layer_impl.h"
#include "cc/resources/raster_worker_pool.h"
#include "cc/resources/rasterizer_delegate.h"
#include "cc/resources/tile.h"
#include "skia/ext/paint_simplifier.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkPixelRef.h"
#include "ui/gfx/rect_conversions.h"
// Flag to indicate whether we should try and detect that
// a tile is of solid color.
const bool kUseColorEstimator = true;

// Minimum width/height of a pile that would require analysis for tiles.
const int kMinDimensionsForAnalysis = 256;
class DisableLCDTextFilter : public SkDrawFilter {
 public:
  // SkDrawFilter interface.
  virtual bool filter(SkPaint* paint, SkDrawFilter::Type type) OVERRIDE {
    if (type != SkDrawFilter::kText_Type)
      return true;

    paint->setLCDRenderText(false);
    return true;
  }
};
class RasterTaskImpl : public RasterTask {
 public:
  RasterTaskImpl(
      const Resource* resource,
      PicturePileImpl* picture_pile,
      const gfx::Rect& content_rect,
      float contents_scale,
      RasterMode raster_mode,
      TileResolution tile_resolution,
      int layer_id,
      const void* tile_id,
      int source_frame_number,
      bool analyze_picture,
      RenderingStatsInstrumentation* rendering_stats,
      const base::Callback<void(const PicturePileImpl::Analysis&, bool)>& reply,
      ImageDecodeTask::Vector* dependencies)
      : RasterTask(resource, dependencies),
        picture_pile_(picture_pile),
        content_rect_(content_rect),
        contents_scale_(contents_scale),
        raster_mode_(raster_mode),
        tile_resolution_(tile_resolution),
        layer_id_(layer_id),
        tile_id_(tile_id),
        source_frame_number_(source_frame_number),
        analyze_picture_(analyze_picture),
        rendering_stats_(rendering_stats),
        reply_(reply) {}

  // Overridden from Task:
  virtual void RunOnWorkerThread() OVERRIDE {
    TRACE_EVENT0("cc", "RasterizerTaskImpl::RunOnWorkerThread");

    DCHECK(picture_pile_);
    AnalyzeAndRaster(picture_pile_->GetCloneForDrawingOnThread(
        RasterWorkerPool::GetPictureCloneIndexForCurrentThread()));
  }

  // Overridden from RasterizerTask:
  virtual void ScheduleOnOriginThread(RasterizerTaskClient* client) OVERRIDE {
    canvas_ = client->AcquireCanvasForRaster(this);
  }
  virtual void RunOnOriginThread() OVERRIDE {
    TRACE_EVENT0("cc", "RasterTaskImpl::RunOnOriginThread");
    AnalyzeAndRaster(picture_pile_);
  }
  virtual void CompleteOnOriginThread(RasterizerTaskClient* client) OVERRIDE {
    canvas_ = NULL;
    client->ReleaseCanvasForRaster(this);
  }
  virtual void RunReplyOnOriginThread() OVERRIDE {
    reply_.Run(analysis_, !HasFinishedRunning());
  }

 protected:
  virtual ~RasterTaskImpl() { DCHECK(!canvas_); }
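
  // Note on the overrides above: RunOnWorkerThread() rasters into a
  // thread-local clone of the picture pile, while RunOnOriginThread() uses
  // |picture_pile_| directly (presumably the path taken when the rasterizer
  // runs tasks on the origin thread, e.g. for GPU rasterization). The canvas
  // is acquired in ScheduleOnOriginThread() and released in
  // CompleteOnOriginThread(), bracketing both execution paths.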
 private:
  scoped_ptr<base::Value> DataAsValue() const {
    scoped_ptr<base::DictionaryValue> res(new base::DictionaryValue());
    res->Set("tile_id", TracedValue::CreateIDRef(tile_id_).release());
    res->Set("resolution", TileResolutionAsValue(tile_resolution_).release());
    res->SetInteger("source_frame_number", source_frame_number_);
    res->SetInteger("layer_id", layer_id_);
    return res.PassAs<base::Value>();
  }
  void AnalyzeAndRaster(PicturePileImpl* picture_pile) {
    DCHECK(picture_pile);

    if (analyze_picture_) {
      Analyze(picture_pile);
      if (analysis_.is_solid_color)
        return;
    }

    Raster(picture_pile);
  }
  void Analyze(PicturePileImpl* picture_pile) {
    TRACE_EVENT1("cc",
                 "RasterTaskImpl::Analyze",
                 "data",
                 TracedValue::FromValue(DataAsValue().release()));

    DCHECK(picture_pile);

    picture_pile->AnalyzeInRect(
        content_rect_, contents_scale_, &analysis_, rendering_stats_);

    // Record the solid color prediction.
    UMA_HISTOGRAM_BOOLEAN("Renderer4.SolidColorTilesAnalyzed",
                          analysis_.is_solid_color);

    // Clear the flag if we're not using the estimator.
    analysis_.is_solid_color &= kUseColorEstimator;
  }
  void Raster(PicturePileImpl* picture_pile) {
    TRACE_EVENT2("cc",
                 "RasterTaskImpl::Raster",
                 "data",
                 TracedValue::FromValue(DataAsValue().release()),
                 "raster_mode",
                 TracedValue::FromValue(RasterModeAsValue(raster_mode_).release()));

    devtools_instrumentation::ScopedLayerTask raster_task(
        devtools_instrumentation::kRasterTask, layer_id_);

    skia::RefPtr<SkDrawFilter> draw_filter;
    switch (raster_mode_) {
      case LOW_QUALITY_RASTER_MODE:
        draw_filter = skia::AdoptRef(new skia::PaintSimplifier);
        break;
      case HIGH_QUALITY_NO_LCD_RASTER_MODE:
        draw_filter = skia::AdoptRef(new DisableLCDTextFilter);
        break;
      case HIGH_QUALITY_RASTER_MODE:
        break;
      case NUM_RASTER_MODES:
        NOTREACHED();
    }
    canvas_->setDrawFilter(draw_filter.get());

    base::TimeDelta prev_rasterize_time =
        rendering_stats_->impl_thread_rendering_stats().rasterize_time;

    // Only record rasterization time for highres tiles, because
    // lowres tiles are not required for activation and therefore
    // introduce noise in the measurement (sometimes they get rasterized
    // before we draw and sometimes they aren't).
    RenderingStatsInstrumentation* stats =
        tile_resolution_ == HIGH_RESOLUTION ? rendering_stats_ : NULL;
    DCHECK(picture_pile);
    picture_pile->RasterToBitmap(
        canvas_, content_rect_, contents_scale_, stats);
    if (rendering_stats_->record_rendering_stats()) {
      base::TimeDelta current_rasterize_time =
          rendering_stats_->impl_thread_rendering_stats().rasterize_time;
      HISTOGRAM_CUSTOM_COUNTS(
          "Renderer4.PictureRasterTimeUS",
          (current_rasterize_time - prev_rasterize_time).InMicroseconds(),
          0,
          100000,
          100);
    }
  }
  PicturePileImpl::Analysis analysis_;
  scoped_refptr<PicturePileImpl> picture_pile_;
  gfx::Rect content_rect_;
  float contents_scale_;
  RasterMode raster_mode_;
  TileResolution tile_resolution_;
  int layer_id_;
  const void* tile_id_;
  int source_frame_number_;
  bool analyze_picture_;
  RenderingStatsInstrumentation* rendering_stats_;
  const base::Callback<void(const PicturePileImpl::Analysis&, bool)> reply_;
  SkCanvas* canvas_;

  DISALLOW_COPY_AND_ASSIGN(RasterTaskImpl);
};
class ImageDecodeTaskImpl : public ImageDecodeTask {
 public:
  ImageDecodeTaskImpl(SkPixelRef* pixel_ref,
                      RenderingStatsInstrumentation* rendering_stats,
                      const base::Callback<void(bool was_canceled)>& reply)
      : pixel_ref_(skia::SharePtr(pixel_ref)),
        rendering_stats_(rendering_stats),
        reply_(reply) {}

  // Overridden from Task:
  virtual void RunOnWorkerThread() OVERRIDE {
    TRACE_EVENT0("cc", "ImageDecodeTaskImpl::RunOnWorkerThread");
    Decode();
  }

  // Overridden from RasterizerTask:
  virtual void ScheduleOnOriginThread(RasterizerTaskClient* client) OVERRIDE {}
  virtual void RunOnOriginThread() OVERRIDE {
    TRACE_EVENT0("cc", "ImageDecodeTaskImpl::RunOnOriginThread");
    Decode();
  }
  virtual void CompleteOnOriginThread(RasterizerTaskClient* client) OVERRIDE {}
  virtual void RunReplyOnOriginThread() OVERRIDE {
    reply_.Run(!HasFinishedRunning());
  }

 protected:
  virtual ~ImageDecodeTaskImpl() {}

 private:
  void Decode() {
    devtools_instrumentation::ScopedImageDecodeTask image_decode_task(
        pixel_ref_.get());
    // This will cause the image referred to by pixel ref to be decoded.
    pixel_ref_->lockPixels();
    pixel_ref_->unlockPixels();
  }

  skia::RefPtr<SkPixelRef> pixel_ref_;
  RenderingStatsInstrumentation* rendering_stats_;
  const base::Callback<void(bool was_canceled)> reply_;

  DISALLOW_COPY_AND_ASSIGN(ImageDecodeTaskImpl);
};
const size_t kScheduledRasterTasksLimit = 32u;
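
// Note: the three tables below are lookup tables consumed by
// GetTilesWithAssignedBins(). Each is indexed as map[row][current_bin]:
// kBinPolicyMap by the current TileMemoryLimitPolicy, and kBinReadyToDrawMap
// and kBinIsActiveMap by a 0/1 flag. The entry is the bin the tile is moved
// to under that policy or flag.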
// Memory limit policy works by mapping some bin states to the NEVER bin.
const ManagedTileBin kBinPolicyMap[NUM_TILE_MEMORY_LIMIT_POLICIES][NUM_BINS] = {
    // [ALLOW_NOTHING]
    {NEVER_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NEVER_BIN,  // [NOW_BIN]
     NEVER_BIN,  // [SOON_BIN]
     NEVER_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     NEVER_BIN,  // [EVENTUALLY_BIN]
     NEVER_BIN,  // [AT_LAST_AND_ACTIVE_BIN]
     NEVER_BIN,  // [AT_LAST_BIN]
     NEVER_BIN   // [NEVER_BIN]
    },
    // [ALLOW_ABSOLUTE_MINIMUM]
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     NEVER_BIN,                  // [SOON_BIN]
     NEVER_BIN,                  // [EVENTUALLY_AND_ACTIVE_BIN]
     NEVER_BIN,                  // [EVENTUALLY_BIN]
     NEVER_BIN,                  // [AT_LAST_AND_ACTIVE_BIN]
     NEVER_BIN,                  // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    },
    // [ALLOW_PREPAINT_ONLY]
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     NEVER_BIN,                  // [EVENTUALLY_AND_ACTIVE_BIN]
     NEVER_BIN,                  // [EVENTUALLY_BIN]
     NEVER_BIN,                  // [AT_LAST_AND_ACTIVE_BIN]
     NEVER_BIN,                  // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    },
    // [ALLOW_ANYTHING]
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     EVENTUALLY_BIN,             // [EVENTUALLY_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_AND_ACTIVE_BIN]
     AT_LAST_BIN,                // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    }};
// Ready to draw works by mapping NOW_BIN to NOW_AND_READY_TO_DRAW_BIN.
const ManagedTileBin kBinReadyToDrawMap[2][NUM_BINS] = {
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     EVENTUALLY_BIN,             // [EVENTUALLY_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_AND_ACTIVE_BIN]
     AT_LAST_BIN,                // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    },
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_AND_READY_TO_DRAW_BIN,  // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     EVENTUALLY_BIN,             // [EVENTUALLY_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_AND_ACTIVE_BIN]
     AT_LAST_BIN,                // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    }};
// Active works by mapping some bin states to the equivalent _ACTIVE_BIN state.
const ManagedTileBin kBinIsActiveMap[2][NUM_BINS] = {
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     EVENTUALLY_BIN,             // [EVENTUALLY_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_AND_ACTIVE_BIN]
     AT_LAST_BIN,                // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    },
    {NOW_AND_READY_TO_DRAW_BIN,  // [NOW_AND_READY_TO_DRAW_BIN]
     NOW_BIN,                    // [NOW_BIN]
     SOON_BIN,                   // [SOON_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_AND_ACTIVE_BIN]
     EVENTUALLY_AND_ACTIVE_BIN,  // [EVENTUALLY_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_AND_ACTIVE_BIN]
     AT_LAST_AND_ACTIVE_BIN,     // [AT_LAST_BIN]
     NEVER_BIN                   // [NEVER_BIN]
    }};
// Determine bin based on three categories of tiles: things we need now,
// things we need soon, and eventually.
inline ManagedTileBin BinFromTilePriority(const TilePriority& prio) {
  const float kBackflingGuardDistancePixels = 314.0f;

  if (prio.priority_bin == TilePriority::NOW)
    return NOW_BIN;

  if (prio.priority_bin == TilePriority::SOON ||
      prio.distance_to_visible < kBackflingGuardDistancePixels)
    return SOON_BIN;

  if (prio.distance_to_visible == std::numeric_limits<float>::infinity())
    return NEVER_BIN;

  return EVENTUALLY_BIN;
}
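
// Worked example of the mapping above (a sketch, not exhaustive): a tile
// whose priority_bin is TilePriority::EVENTUALLY but which sits 100px from
// the visible rect still lands in SOON_BIN, because it is within
// kBackflingGuardDistancePixels; the same tile at an infinite
// distance_to_visible lands in NEVER_BIN.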
RasterTaskCompletionStats::RasterTaskCompletionStats()
    : completed_count(0u), canceled_count(0u) {}

scoped_ptr<base::Value> RasterTaskCompletionStatsAsValue(
    const RasterTaskCompletionStats& stats) {
  scoped_ptr<base::DictionaryValue> state(new base::DictionaryValue());
  state->SetInteger("completed_count", stats.completed_count);
  state->SetInteger("canceled_count", stats.canceled_count);
  return state.PassAs<base::Value>();
}
scoped_ptr<TileManager> TileManager::Create(
    TileManagerClient* client,
    ResourcePool* resource_pool,
    Rasterizer* rasterizer,
    Rasterizer* gpu_rasterizer,
    size_t max_raster_usage_bytes,
    bool use_rasterize_on_demand,
    RenderingStatsInstrumentation* rendering_stats_instrumentation) {
  return make_scoped_ptr(new TileManager(client,
                                         resource_pool,
                                         rasterizer,
                                         gpu_rasterizer,
                                         max_raster_usage_bytes,
                                         use_rasterize_on_demand,
                                         rendering_stats_instrumentation));
}
TileManager::TileManager(
    TileManagerClient* client,
    ResourcePool* resource_pool,
    Rasterizer* rasterizer,
    Rasterizer* gpu_rasterizer,
    size_t max_raster_usage_bytes,
    bool use_rasterize_on_demand,
    RenderingStatsInstrumentation* rendering_stats_instrumentation)
    : client_(client),
      resource_pool_(resource_pool),
      prioritized_tiles_dirty_(false),
      all_tiles_that_need_to_be_rasterized_have_memory_(true),
      all_tiles_required_for_activation_have_memory_(true),
      memory_required_bytes_(0),
      memory_nice_to_have_bytes_(0),
      bytes_releasable_(0),
      resources_releasable_(0),
      max_raster_usage_bytes_(max_raster_usage_bytes),
      ever_exceeded_memory_budget_(false),
      rendering_stats_instrumentation_(rendering_stats_instrumentation),
      did_initialize_visible_tile_(false),
      did_check_for_completed_tasks_since_last_schedule_tasks_(true),
      use_rasterize_on_demand_(use_rasterize_on_demand) {
  Rasterizer* rasterizers[NUM_RASTERIZER_TYPES] = {
      rasterizer,      // RASTERIZER_TYPE_DEFAULT
      gpu_rasterizer,  // RASTERIZER_TYPE_GPU
  };
  rasterizer_delegate_ =
      RasterizerDelegate::Create(this, rasterizers, arraysize(rasterizers));
}
TileManager::~TileManager() {
  // Reset global state and manage. This should cause
  // our memory usage to drop to zero.
  global_state_ = GlobalStateThatImpactsTilePriority();

  CleanUpReleasedTiles();
  DCHECK_EQ(0u, tiles_.size());

  RasterTaskQueue empty[NUM_RASTERIZER_TYPES];
  rasterizer_delegate_->ScheduleTasks(empty);
  orphan_raster_tasks_.clear();

  // This should finish all pending tasks and release any uninitialized
  // resources.
  rasterizer_delegate_->Shutdown();
  rasterizer_delegate_->CheckForCompletedTasks();

  DCHECK_EQ(0u, bytes_releasable_);
  DCHECK_EQ(0u, resources_releasable_);

  for (std::vector<PictureLayerImpl*>::iterator it = layers_.begin();
       it != layers_.end();
       ++it)
    (*it)->DidUnregisterLayer();
}
void TileManager::Release(Tile* tile) {
  prioritized_tiles_dirty_ = true;
  released_tiles_.push_back(tile);
}

void TileManager::DidChangeTilePriority(Tile* tile) {
  prioritized_tiles_dirty_ = true;
}

bool TileManager::ShouldForceTasksRequiredForActivationToComplete() const {
  return global_state_.tree_priority != SMOOTHNESS_TAKES_PRIORITY;
}
void TileManager::CleanUpReleasedTiles() {
  for (std::vector<Tile*>::iterator it = released_tiles_.begin();
       it != released_tiles_.end();
       ++it) {
    Tile* tile = *it;
    ManagedTileState& mts = tile->managed_state();

    for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
      FreeResourceForTile(tile, static_cast<RasterMode>(mode));
      orphan_raster_tasks_.push_back(mts.tile_versions[mode].raster_task_);
    }

    DCHECK(tiles_.find(tile->id()) != tiles_.end());
    tiles_.erase(tile->id());

    LayerCountMap::iterator layer_it =
        used_layer_counts_.find(tile->layer_id());
    DCHECK_GT(layer_it->second, 0);
    if (--layer_it->second == 0) {
      used_layer_counts_.erase(layer_it);
      image_decode_tasks_.erase(tile->layer_id());
    }
  }

  released_tiles_.clear();
}
void TileManager::UpdatePrioritizedTileSetIfNeeded() {
  if (!prioritized_tiles_dirty_)
    return;

  CleanUpReleasedTiles();

  prioritized_tiles_.Clear();
  GetTilesWithAssignedBins(&prioritized_tiles_);
  prioritized_tiles_dirty_ = false;
}
void TileManager::DidFinishRunningTasks() {
  TRACE_EVENT0("cc", "TileManager::DidFinishRunningTasks");

  bool memory_usage_above_limit = resource_pool_->total_memory_usage_bytes() >
                                  global_state_.soft_memory_limit_in_bytes;

  // When OOM, keep re-assigning memory until we reach a steady state
  // where top-priority tiles are initialized.
  if (all_tiles_that_need_to_be_rasterized_have_memory_ &&
      !memory_usage_above_limit)
    return;

  rasterizer_delegate_->CheckForCompletedTasks();
  did_check_for_completed_tasks_since_last_schedule_tasks_ = true;

  TileVector tiles_that_need_to_be_rasterized;
  AssignGpuMemoryToTiles(&prioritized_tiles_,
                         &tiles_that_need_to_be_rasterized);

  // |tiles_that_need_to_be_rasterized| will be empty when we reach a
  // steady memory state. Keep scheduling tasks until we reach this state.
  if (!tiles_that_need_to_be_rasterized.empty()) {
    ScheduleTasks(tiles_that_need_to_be_rasterized);
    return;
  }

  resource_pool_->ReduceResourceUsage();

  // We don't reserve memory for required-for-activation tiles during
  // accelerated gestures, so we just postpone activation when we don't
  // have these tiles, and activate after the accelerated gesture.
  bool allow_rasterize_on_demand =
      global_state_.tree_priority != SMOOTHNESS_TAKES_PRIORITY;

  // Use on-demand raster for any required-for-activation tiles that have not
  // been assigned memory after reaching a steady memory state. This
  // ensures that we activate even when OOM.
  for (TileMap::iterator it = tiles_.begin(); it != tiles_.end(); ++it) {
    Tile* tile = it->second;
    ManagedTileState& mts = tile->managed_state();
    ManagedTileState::TileVersion& tile_version =
        mts.tile_versions[mts.raster_mode];

    if (tile->required_for_activation() && !tile_version.IsReadyToDraw()) {
      // If we can't raster on demand, give up early (and don't activate).
      if (!allow_rasterize_on_demand)
        return;

      if (use_rasterize_on_demand_)
        tile_version.set_rasterize_on_demand();
    }
  }

  client_->NotifyReadyToActivate();
}
void TileManager::DidFinishRunningTasksRequiredForActivation() {
  // This is only a true indication that all tiles required for
  // activation are initialized when no tiles are OOM. We need to
  // wait for DidFinishRunningTasks() to be called, try to re-assign
  // memory and in worst case use on-demand raster when tiles
  // required for activation are OOM.
  if (!all_tiles_required_for_activation_have_memory_)
    return;

  client_->NotifyReadyToActivate();
}
void TileManager::GetTilesWithAssignedBins(PrioritizedTileSet* tiles) {
  TRACE_EVENT0("cc", "TileManager::GetTilesWithAssignedBins");

  // Compute new stats to be returned by GetMemoryStats().
  memory_required_bytes_ = 0;
  memory_nice_to_have_bytes_ = 0;

  const TileMemoryLimitPolicy memory_policy = global_state_.memory_limit_policy;
  const TreePriority tree_priority = global_state_.tree_priority;

  // For each tree, bin into different categories of tiles.
  for (TileMap::const_iterator it = tiles_.begin(); it != tiles_.end(); ++it) {
    Tile* tile = it->second;
    ManagedTileState& mts = tile->managed_state();

    const ManagedTileState::TileVersion& tile_version =
        tile->GetTileVersionForDrawing();
    bool tile_is_ready_to_draw = tile_version.IsReadyToDraw();
    bool tile_is_active = tile_is_ready_to_draw ||
                          mts.tile_versions[mts.raster_mode].raster_task_;

    // Get the active priority and bin.
    TilePriority active_priority = tile->priority(ACTIVE_TREE);
    ManagedTileBin active_bin = BinFromTilePriority(active_priority);

    // Get the pending priority and bin.
    TilePriority pending_priority = tile->priority(PENDING_TREE);
    ManagedTileBin pending_bin = BinFromTilePriority(pending_priority);

    bool pending_is_low_res = pending_priority.resolution == LOW_RESOLUTION;
    bool pending_is_non_ideal =
        pending_priority.resolution == NON_IDEAL_RESOLUTION;
    bool active_is_non_ideal =
        active_priority.resolution == NON_IDEAL_RESOLUTION;

    // Adjust pending bin state for low res tiles. This prevents
    // pending tree low-res tiles from being initialized before
    // high-res tiles.
    if (pending_is_low_res)
      pending_bin = std::max(pending_bin, EVENTUALLY_BIN);

    // Adjust bin state based on if ready to draw.
    active_bin = kBinReadyToDrawMap[tile_is_ready_to_draw][active_bin];
    pending_bin = kBinReadyToDrawMap[tile_is_ready_to_draw][pending_bin];

    // Adjust bin state based on if active.
    active_bin = kBinIsActiveMap[tile_is_active][active_bin];
    pending_bin = kBinIsActiveMap[tile_is_active][pending_bin];

    // We never want to paint new non-ideal tiles, as we always have
    // a high-res tile covering that content (paint that instead).
    if (!tile_is_ready_to_draw && active_is_non_ideal)
      active_bin = NEVER_BIN;
    if (!tile_is_ready_to_draw && pending_is_non_ideal)
      pending_bin = NEVER_BIN;

    // Compute combined bin.
    ManagedTileBin combined_bin = std::min(active_bin, pending_bin);

    if (!tile_is_ready_to_draw || tile_version.requires_resource()) {
      // The bin that the tile would have if the GPU memory manager had
      // a maximally permissive policy, send to the GPU memory manager
      // to determine policy.
      ManagedTileBin gpu_memmgr_stats_bin = combined_bin;
      if ((gpu_memmgr_stats_bin == NOW_BIN) ||
          (gpu_memmgr_stats_bin == NOW_AND_READY_TO_DRAW_BIN))
        memory_required_bytes_ += BytesConsumedIfAllocated(tile);
      if (gpu_memmgr_stats_bin != NEVER_BIN)
        memory_nice_to_have_bytes_ += BytesConsumedIfAllocated(tile);
    }

    ManagedTileBin tree_bin[NUM_TREES];
    tree_bin[ACTIVE_TREE] = kBinPolicyMap[memory_policy][active_bin];
    tree_bin[PENDING_TREE] = kBinPolicyMap[memory_policy][pending_bin];

    TilePriority tile_priority;
    switch (tree_priority) {
      case SAME_PRIORITY_FOR_BOTH_TREES:
        mts.bin = kBinPolicyMap[memory_policy][combined_bin];
        tile_priority = tile->combined_priority();
        break;
      case SMOOTHNESS_TAKES_PRIORITY:
        mts.bin = tree_bin[ACTIVE_TREE];
        tile_priority = active_priority;
        break;
      case NEW_CONTENT_TAKES_PRIORITY:
        mts.bin = tree_bin[PENDING_TREE];
        tile_priority = pending_priority;
        break;
    }

    // Bump up the priority if we determined it's NEVER_BIN on one tree,
    // but is still required on the other tree.
    bool is_in_never_bin_on_both_trees = tree_bin[ACTIVE_TREE] == NEVER_BIN &&
                                         tree_bin[PENDING_TREE] == NEVER_BIN;

    if (mts.bin == NEVER_BIN && !is_in_never_bin_on_both_trees)
      mts.bin = tile_is_active ? AT_LAST_AND_ACTIVE_BIN : AT_LAST_BIN;

    mts.resolution = tile_priority.resolution;
    mts.priority_bin = tile_priority.priority_bin;
    mts.distance_to_visible = tile_priority.distance_to_visible;
    mts.required_for_activation = tile_priority.required_for_activation;

    mts.visible_and_ready_to_draw =
        tree_bin[ACTIVE_TREE] == NOW_AND_READY_TO_DRAW_BIN;

    // If the tile is in NEVER_BIN and it does not have an active task, then we
    // can release the resources early. If it does have the task however, we
    // should keep it in the prioritized tile set to ensure that
    // AssignGpuMemoryToTiles can visit it.
    if (mts.bin == NEVER_BIN &&
        !mts.tile_versions[mts.raster_mode].raster_task_) {
      FreeResourcesForTile(tile);
      continue;
    }

    // Insert the tile into a priority set.
    tiles->InsertTile(tile, mts.bin);
  }
}
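
// Example walk-through of the binning above (a sketch): a visible tile that
// is already drawable on the active tree starts as NOW_BIN from
// BinFromTilePriority(), is promoted to NOW_AND_READY_TO_DRAW_BIN by
// kBinReadyToDrawMap, stays there through kBinIsActiveMap, and survives every
// memory policy except ALLOW_NOTHING in kBinPolicyMap.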
void TileManager::ManageTiles(const GlobalStateThatImpactsTilePriority& state) {
  TRACE_EVENT0("cc", "TileManager::ManageTiles");

  // Update internal state.
  if (state != global_state_) {
    global_state_ = state;
    prioritized_tiles_dirty_ = true;
  }

  // We need to call CheckForCompletedTasks() once in-between each call
  // to ScheduleTasks() to prevent canceled tasks from being scheduled.
  if (!did_check_for_completed_tasks_since_last_schedule_tasks_) {
    rasterizer_delegate_->CheckForCompletedTasks();
    did_check_for_completed_tasks_since_last_schedule_tasks_ = true;
  }

  UpdatePrioritizedTileSetIfNeeded();

  TileVector tiles_that_need_to_be_rasterized;
  AssignGpuMemoryToTiles(&prioritized_tiles_,
                         &tiles_that_need_to_be_rasterized);

  // Finally, schedule rasterizer tasks.
  ScheduleTasks(tiles_that_need_to_be_rasterized);

  TRACE_EVENT_INSTANT1("cc",
                       "DidManage",
                       TRACE_EVENT_SCOPE_THREAD,
                       "state",
                       TracedValue::FromValue(BasicStateAsValue().release()));

  TRACE_COUNTER_ID1("cc",
                    "unused_memory_bytes",
                    this,
                    resource_pool_->total_memory_usage_bytes() -
                        resource_pool_->acquired_memory_usage_bytes());
}
bool TileManager::UpdateVisibleTiles() {
  TRACE_EVENT0("cc", "TileManager::UpdateVisibleTiles");

  rasterizer_delegate_->CheckForCompletedTasks();
  did_check_for_completed_tasks_since_last_schedule_tasks_ = true;

  TRACE_EVENT_INSTANT1(
      "cc",
      "DidUpdateVisibleTiles",
      TRACE_EVENT_SCOPE_THREAD,
      "stats",
      TracedValue::FromValue(RasterTaskCompletionStatsAsValue(
          update_visible_tiles_stats_).release()));
  update_visible_tiles_stats_ = RasterTaskCompletionStats();

  bool did_initialize_visible_tile = did_initialize_visible_tile_;
  did_initialize_visible_tile_ = false;
  return did_initialize_visible_tile;
}
void TileManager::GetMemoryStats(size_t* memory_required_bytes,
                                 size_t* memory_nice_to_have_bytes,
                                 size_t* memory_allocated_bytes,
                                 size_t* memory_used_bytes) const {
  *memory_required_bytes = memory_required_bytes_;
  *memory_nice_to_have_bytes = memory_nice_to_have_bytes_;
  *memory_allocated_bytes = resource_pool_->total_memory_usage_bytes();
  *memory_used_bytes = resource_pool_->acquired_memory_usage_bytes();
}
scoped_ptr<base::Value> TileManager::BasicStateAsValue() const {
  scoped_ptr<base::DictionaryValue> state(new base::DictionaryValue());
  state->SetInteger("tile_count", tiles_.size());
  state->Set("global_state", global_state_.AsValue().release());
  state->Set("memory_requirements", GetMemoryRequirementsAsValue().release());
  return state.PassAs<base::Value>();
}
scoped_ptr<base::Value> TileManager::AllTilesAsValue() const {
  scoped_ptr<base::ListValue> state(new base::ListValue());
  for (TileMap::const_iterator it = tiles_.begin(); it != tiles_.end(); ++it)
    state->Append(it->second->AsValue().release());

  return state.PassAs<base::Value>();
}
scoped_ptr<base::Value> TileManager::GetMemoryRequirementsAsValue() const {
  scoped_ptr<base::DictionaryValue> requirements(new base::DictionaryValue());

  size_t memory_required_bytes;
  size_t memory_nice_to_have_bytes;
  size_t memory_allocated_bytes;
  size_t memory_used_bytes;
  GetMemoryStats(&memory_required_bytes,
                 &memory_nice_to_have_bytes,
                 &memory_allocated_bytes,
                 &memory_used_bytes);

  requirements->SetInteger("memory_required_bytes", memory_required_bytes);
  requirements->SetInteger("memory_nice_to_have_bytes",
                           memory_nice_to_have_bytes);
  requirements->SetInteger("memory_allocated_bytes", memory_allocated_bytes);
  requirements->SetInteger("memory_used_bytes", memory_used_bytes);
  return requirements.PassAs<base::Value>();
}
void TileManager::AssignGpuMemoryToTiles(
    PrioritizedTileSet* tiles,
    TileVector* tiles_that_need_to_be_rasterized) {
  TRACE_EVENT0("cc", "TileManager::AssignGpuMemoryToTiles");

  // Maintain the list of released resources that can potentially be re-used
  // or deleted.
  // If this operation becomes expensive too, only do this after some
  // resource(s) was returned. Note that in that case, one also needs to
  // invalidate when releasing some resource from the pool.
  resource_pool_->CheckBusyResources();

  // Now give memory out to the tiles until we're out, and build
  // the needs-to-be-rasterized queue.
  all_tiles_that_need_to_be_rasterized_have_memory_ = true;
  all_tiles_required_for_activation_have_memory_ = true;

  // Cast to prevent overflow.
  int64 soft_bytes_available =
      static_cast<int64>(bytes_releasable_) +
      static_cast<int64>(global_state_.soft_memory_limit_in_bytes) -
      static_cast<int64>(resource_pool_->acquired_memory_usage_bytes());
  int64 hard_bytes_available =
      static_cast<int64>(bytes_releasable_) +
      static_cast<int64>(global_state_.hard_memory_limit_in_bytes) -
      static_cast<int64>(resource_pool_->acquired_memory_usage_bytes());
  int resources_available = resources_releasable_ +
                            global_state_.num_resources_limit -
                            resource_pool_->acquired_resource_count();
  size_t soft_bytes_allocatable =
      std::max(static_cast<int64>(0), soft_bytes_available);
  size_t hard_bytes_allocatable =
      std::max(static_cast<int64>(0), hard_bytes_available);
  size_t resources_allocatable = std::max(0, resources_available);

  size_t bytes_that_exceeded_memory_budget = 0;
  size_t soft_bytes_left = soft_bytes_allocatable;
  size_t hard_bytes_left = hard_bytes_allocatable;

  size_t resources_left = resources_allocatable;
  bool oomed_soft = false;
  bool oomed_hard = false;
  bool have_hit_soft_memory = false;  // Soft memory comes after hard.

  // Memory we assign to raster tasks now will be deducted from our memory
  // in future iterations if priorities change. By assigning at most half
  // the raster limit, we will always have another 50% left even if priorities
  // change completely (assuming we check for completed/cancelled rasters
  // between each call to this function).
  size_t max_raster_bytes = max_raster_usage_bytes_ / 2;
  size_t raster_bytes = 0;

  unsigned schedule_priority = 1u;
  for (PrioritizedTileSet::Iterator it(tiles, true); it; ++it) {
    Tile* tile = *it;
    ManagedTileState& mts = tile->managed_state();

    mts.scheduled_priority = schedule_priority++;

    mts.raster_mode = tile->DetermineOverallRasterMode();

    ManagedTileState::TileVersion& tile_version =
        mts.tile_versions[mts.raster_mode];

    // If this tile doesn't need a resource, then nothing to do.
    if (!tile_version.requires_resource())
      continue;

    // If the tile is not needed, free it up.
    if (mts.bin == NEVER_BIN) {
      FreeResourcesForTile(tile);
      continue;
    }

    const bool tile_uses_hard_limit = mts.bin <= NOW_BIN;
    const size_t bytes_if_allocated = BytesConsumedIfAllocated(tile);
    const size_t raster_bytes_if_rastered = raster_bytes + bytes_if_allocated;
    const size_t tile_bytes_left =
        (tile_uses_hard_limit) ? hard_bytes_left : soft_bytes_left;

    // Hard-limit is reserved for tiles that would cause a calamity
    // if they were to go away, so by definition they are the highest
    // priority memory, and must be at the front of the list.
    DCHECK(!(have_hit_soft_memory && tile_uses_hard_limit));
    have_hit_soft_memory |= !tile_uses_hard_limit;

    size_t tile_bytes = 0;
    size_t tile_resources = 0;

    // It costs to maintain a resource.
    for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
      if (mts.tile_versions[mode].resource_) {
        tile_bytes += bytes_if_allocated;
        tile_resources++;
      }
    }

    // Allow lower priority tiles with initialized resources to keep
    // their memory by only assigning memory to new raster tasks if
    // they can be scheduled.
    if (raster_bytes_if_rastered <= max_raster_bytes) {
      // If we don't have the required version, and it's not in flight
      // then we'll have to pay to create a new task.
      if (!tile_version.resource_ && !tile_version.raster_task_) {
        tile_bytes += bytes_if_allocated;
        tile_resources++;
      }
    }

    if (tile_bytes > tile_bytes_left || tile_resources > resources_left) {
      FreeResourcesForTile(tile);

      // This tile was already on screen and now its resources have been
      // released. In order to prevent checkerboarding, set this tile as
      // rasterize on demand immediately.
      if (mts.visible_and_ready_to_draw && use_rasterize_on_demand_)
        tile_version.set_rasterize_on_demand();

      oomed_soft = true;
      if (tile_uses_hard_limit) {
        oomed_hard = true;
        bytes_that_exceeded_memory_budget += tile_bytes;
      }
    } else {
      resources_left -= tile_resources;
      hard_bytes_left -= tile_bytes;
      soft_bytes_left =
          (soft_bytes_left > tile_bytes) ? soft_bytes_left - tile_bytes : 0;
      if (tile_version.resource_)
        continue;
    }

    DCHECK(!tile_version.resource_);

    // Tile shouldn't be rasterized if |tiles_that_need_to_be_rasterized|
    // has reached its limit or we've failed to assign gpu memory to this
    // or any higher priority tile. Preventing tiles that fit into the memory
    // budget from being rasterized when a higher-priority tile is OOM is
    // important for two reasons:
    // 1. Tile size should not impact raster priority.
    // 2. Tiles with existing raster task could otherwise incorrectly
    //    be added as they are not affected by |bytes_allocatable|.
    bool can_schedule_tile =
        !oomed_soft && raster_bytes_if_rastered <= max_raster_bytes &&
        tiles_that_need_to_be_rasterized->size() < kScheduledRasterTasksLimit;

    if (!can_schedule_tile) {
      all_tiles_that_need_to_be_rasterized_have_memory_ = false;
      if (tile->required_for_activation())
        all_tiles_required_for_activation_have_memory_ = false;
      it.DisablePriorityOrdering();
      continue;
    }

    raster_bytes = raster_bytes_if_rastered;
    tiles_that_need_to_be_rasterized->push_back(tile);
  }

  // OOM reporting uses hard-limit, soft-OOM is normal depending on limit.
  ever_exceeded_memory_budget_ |= oomed_hard;
  if (ever_exceeded_memory_budget_) {
    TRACE_COUNTER_ID2("cc",
                      "over_memory_budget",
                      this,
                      "budget",
                      global_state_.hard_memory_limit_in_bytes,
                      "over",
                      bytes_that_exceeded_memory_budget);
  }
  memory_stats_from_last_assign_.total_budget_in_bytes =
      global_state_.hard_memory_limit_in_bytes;
  memory_stats_from_last_assign_.bytes_allocated =
      hard_bytes_allocatable - hard_bytes_left;
  memory_stats_from_last_assign_.bytes_unreleasable =
      hard_bytes_allocatable - bytes_releasable_;
  memory_stats_from_last_assign_.bytes_over = bytes_that_exceeded_memory_budget;
}
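
// Bookkeeping note: |bytes_releasable_| and |resources_releasable_| track the
// memory and resource count held by initialized tile versions. They are
// incremented in OnRasterTaskCompleted() when a resource is kept and
// decremented below when one is released, which is what lets
// AssignGpuMemoryToTiles() treat them as reclaimable headroom.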
void TileManager::FreeResourceForTile(Tile* tile, RasterMode mode) {
  ManagedTileState& mts = tile->managed_state();
  if (mts.tile_versions[mode].resource_) {
    resource_pool_->ReleaseResource(mts.tile_versions[mode].resource_.Pass());

    DCHECK_GE(bytes_releasable_, BytesConsumedIfAllocated(tile));
    DCHECK_GE(resources_releasable_, 1u);

    bytes_releasable_ -= BytesConsumedIfAllocated(tile);
    --resources_releasable_;
  }
}
void TileManager::FreeResourcesForTile(Tile* tile) {
  for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
    FreeResourceForTile(tile, static_cast<RasterMode>(mode));
  }
}
void TileManager::FreeUnusedResourcesForTile(Tile* tile) {
  DCHECK(tile->IsReadyToDraw());
  ManagedTileState& mts = tile->managed_state();
  RasterMode used_mode = HIGH_QUALITY_NO_LCD_RASTER_MODE;
  for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
    if (mts.tile_versions[mode].IsReadyToDraw()) {
      used_mode = static_cast<RasterMode>(mode);
      break;
    }
  }

  for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
    if (mode != used_mode)
      FreeResourceForTile(tile, static_cast<RasterMode>(mode));
  }
}
void TileManager::ScheduleTasks(
    const TileVector& tiles_that_need_to_be_rasterized) {
  TRACE_EVENT1("cc",
               "TileManager::ScheduleTasks",
               "count",
               tiles_that_need_to_be_rasterized.size());

  DCHECK(did_check_for_completed_tasks_since_last_schedule_tasks_);

  for (size_t i = 0; i < NUM_RASTERIZER_TYPES; ++i)
    raster_queue_[i].Reset();

  // Build a new task queue containing all tasks currently needed. Tasks
  // are added in order of priority, highest priority task first.
  for (TileVector::const_iterator it = tiles_that_need_to_be_rasterized.begin();
       it != tiles_that_need_to_be_rasterized.end();
       ++it) {
    Tile* tile = *it;
    ManagedTileState& mts = tile->managed_state();
    ManagedTileState::TileVersion& tile_version =
        mts.tile_versions[mts.raster_mode];

    DCHECK(tile_version.requires_resource());
    DCHECK(!tile_version.resource_);

    if (!tile_version.raster_task_)
      tile_version.raster_task_ = CreateRasterTask(tile);

    size_t pool_type = tile->use_gpu_rasterization() ? RASTERIZER_TYPE_GPU
                                                     : RASTERIZER_TYPE_DEFAULT;

    raster_queue_[pool_type].items.push_back(RasterTaskQueue::Item(
        tile_version.raster_task_.get(), tile->required_for_activation()));
    raster_queue_[pool_type].required_for_activation_count +=
        tile->required_for_activation();
  }

  // We must reduce the amount of unused resources before calling
  // ScheduleTasks to prevent usage from rising above limits.
  resource_pool_->ReduceResourceUsage();

  // Schedule running of |raster_tasks_|. This replaces any previously
  // scheduled tasks and effectively cancels all tasks not present
  // in |raster_tasks_|.
  rasterizer_delegate_->ScheduleTasks(raster_queue_);

  // It's now safe to clean up orphan tasks as raster worker pool is not
  // allowed to keep around unreferenced raster tasks after ScheduleTasks() has
  // been called.
  orphan_raster_tasks_.clear();

  did_check_for_completed_tasks_since_last_schedule_tasks_ = false;
}
scoped_refptr<ImageDecodeTask> TileManager::CreateImageDecodeTask(
    Tile* tile,
    SkPixelRef* pixel_ref) {
  return make_scoped_refptr(new ImageDecodeTaskImpl(
      pixel_ref,
      rendering_stats_instrumentation_,
      base::Bind(&TileManager::OnImageDecodeTaskCompleted,
                 base::Unretained(this),
                 tile->layer_id(),
                 base::Unretained(pixel_ref))));
}
scoped_refptr<RasterTask> TileManager::CreateRasterTask(Tile* tile) {
  ManagedTileState& mts = tile->managed_state();

  scoped_ptr<ScopedResource> resource =
      resource_pool_->AcquireResource(tile->tile_size_.size());
  const ScopedResource* const_resource = resource.get();

  // Create and queue all image decode tasks that this tile depends on.
  ImageDecodeTask::Vector decode_tasks;
  PixelRefTaskMap& existing_pixel_refs = image_decode_tasks_[tile->layer_id()];
  for (PicturePileImpl::PixelRefIterator iter(
           tile->content_rect(), tile->contents_scale(), tile->picture_pile());
       iter;
       ++iter) {
    SkPixelRef* pixel_ref = *iter;
    uint32_t id = pixel_ref->getGenerationID();

    // Append existing image decode task if available.
    PixelRefTaskMap::iterator decode_task_it = existing_pixel_refs.find(id);
    if (decode_task_it != existing_pixel_refs.end()) {
      decode_tasks.push_back(decode_task_it->second);
      continue;
    }

    // Create and append new image decode task for this pixel ref.
    scoped_refptr<ImageDecodeTask> decode_task =
        CreateImageDecodeTask(tile, pixel_ref);
    decode_tasks.push_back(decode_task);
    existing_pixel_refs[id] = decode_task;
  }

  // We analyze picture before rasterization to detect solid-color tiles.
  // If the tile is detected as such there is no need to raster or upload.
  // It is drawn directly as a solid-color quad saving raster and upload cost.
  // The analysis step is however expensive and is not justified when doing
  // gpu rasterization where there is no upload.
  //
  // Additionally, we do not want to do the analysis if the layer that produced
  // this tile is narrow, since more likely than not the tile would not be
  // solid. We use the picture pile size as a proxy for layer size, since it
  // represents the recorded (and thus rasterizable) content.
  // Note that this last optimization is a heuristic that ensures that we don't
  // spend too much time analyzing tiles on a multitude of small layers, as it
  // is likely that these layers have some non-solid content.
  gfx::Size pile_size = tile->picture_pile()->tiling_rect().size();
  bool analyze_picture = !tile->use_gpu_rasterization() &&
                         std::min(pile_size.width(), pile_size.height()) >=
                             kMinDimensionsForAnalysis;

  return make_scoped_refptr(
      new RasterTaskImpl(const_resource,
                         tile->picture_pile(),
                         tile->content_rect(),
                         tile->contents_scale(),
                         mts.raster_mode,
                         mts.resolution,
                         tile->layer_id(),
                         static_cast<const void*>(tile),
                         tile->source_frame_number(),
                         analyze_picture,
                         rendering_stats_instrumentation_,
                         base::Bind(&TileManager::OnRasterTaskCompleted,
                                    base::Unretained(this),
                                    tile->id(),
                                    base::Passed(&resource),
                                    mts.raster_mode),
                         &decode_tasks));
}
void TileManager::OnImageDecodeTaskCompleted(int layer_id,
                                             SkPixelRef* pixel_ref,
                                             bool was_canceled) {
  // If the task was canceled, we need to clean it up
  // from |image_decode_tasks_|.
  if (!was_canceled)
    return;

  LayerPixelRefTaskMap::iterator layer_it = image_decode_tasks_.find(layer_id);
  if (layer_it == image_decode_tasks_.end())
    return;

  PixelRefTaskMap& pixel_ref_tasks = layer_it->second;
  PixelRefTaskMap::iterator task_it =
      pixel_ref_tasks.find(pixel_ref->getGenerationID());

  if (task_it != pixel_ref_tasks.end())
    pixel_ref_tasks.erase(task_it);
}
void TileManager::OnRasterTaskCompleted(
    Tile::Id tile_id,
    scoped_ptr<ScopedResource> resource,
    RasterMode raster_mode,
    const PicturePileImpl::Analysis& analysis,
    bool was_canceled) {
  TileMap::iterator it = tiles_.find(tile_id);
  if (it == tiles_.end()) {
    ++update_visible_tiles_stats_.canceled_count;
    resource_pool_->ReleaseResource(resource.Pass());
    return;
  }

  Tile* tile = it->second;
  ManagedTileState& mts = tile->managed_state();
  ManagedTileState::TileVersion& tile_version = mts.tile_versions[raster_mode];
  DCHECK(tile_version.raster_task_);
  orphan_raster_tasks_.push_back(tile_version.raster_task_);
  tile_version.raster_task_ = NULL;

  if (was_canceled) {
    ++update_visible_tiles_stats_.canceled_count;
    resource_pool_->ReleaseResource(resource.Pass());
    return;
  }

  ++update_visible_tiles_stats_.completed_count;

  tile_version.set_has_text(analysis.has_text);
  if (analysis.is_solid_color) {
    tile_version.set_solid_color(analysis.solid_color);
    resource_pool_->ReleaseResource(resource.Pass());
  } else {
    tile_version.set_use_resource();
    tile_version.resource_ = resource.Pass();

    bytes_releasable_ += BytesConsumedIfAllocated(tile);
    ++resources_releasable_;
  }

  FreeUnusedResourcesForTile(tile);
  if (tile->priority(ACTIVE_TREE).distance_to_visible == 0.f)
    did_initialize_visible_tile_ = true;
}
scoped_refptr<Tile> TileManager::CreateTile(PicturePileImpl* picture_pile,
                                            const gfx::Size& tile_size,
                                            const gfx::Rect& content_rect,
                                            const gfx::Rect& opaque_rect,
                                            float contents_scale,
                                            int layer_id,
                                            int source_frame_number,
                                            int flags) {
  scoped_refptr<Tile> tile = make_scoped_refptr(new Tile(this,
                                                         picture_pile,
                                                         tile_size,
                                                         content_rect,
                                                         opaque_rect,
                                                         contents_scale,
                                                         layer_id,
                                                         source_frame_number,
                                                         flags));
  DCHECK(tiles_.find(tile->id()) == tiles_.end());

  tiles_[tile->id()] = tile;
  used_layer_counts_[tile->layer_id()]++;
  prioritized_tiles_dirty_ = true;
  return tile;
}
void TileManager::RegisterPictureLayerImpl(PictureLayerImpl* layer) {
  DCHECK(std::find(layers_.begin(), layers_.end(), layer) == layers_.end());
  layers_.push_back(layer);
}
void TileManager::UnregisterPictureLayerImpl(PictureLayerImpl* layer) {
  std::vector<PictureLayerImpl*>::iterator it =
      std::find(layers_.begin(), layers_.end(), layer);
  DCHECK(it != layers_.end());
  layers_.erase(it);
}
void TileManager::GetPairedPictureLayers(
    std::vector<PairedPictureLayer>* paired_layers) const {
  paired_layers->clear();
  // Reserve the maximum possible number of paired layers.
  paired_layers->reserve(layers_.size());

  for (std::vector<PictureLayerImpl*>::const_iterator it = layers_.begin();
       it != layers_.end();
       ++it) {
    PictureLayerImpl* layer = *it;

    // This is a recycle tree layer; we can safely skip it since the tiles on
    // this layer have to be accessible via the active tree.
    if (!layer->IsOnActiveOrPendingTree())
      continue;

    PictureLayerImpl* twin_layer = layer->GetTwinLayer();

    // If the twin layer is recycled, it is not a valid twin.
    if (twin_layer && !twin_layer->IsOnActiveOrPendingTree())
      twin_layer = NULL;

    PairedPictureLayer paired_layer;
    WhichTree tree = layer->GetTree();

    // If the current tree is ACTIVE_TREE, then always generate a paired_layer.
    // If current tree is PENDING_TREE, then only generate a paired_layer if
    // there is no twin layer.
    if (tree == ACTIVE_TREE) {
      DCHECK(!twin_layer || twin_layer->GetTree() == PENDING_TREE);
      paired_layer.active_layer = layer;
      paired_layer.pending_layer = twin_layer;
      paired_layers->push_back(paired_layer);
    } else if (!twin_layer) {
      paired_layer.active_layer = NULL;
      paired_layer.pending_layer = layer;
      paired_layers->push_back(paired_layer);
    }
  }
}
TileManager::PairedPictureLayer::PairedPictureLayer()
    : active_layer(NULL), pending_layer(NULL) {}

TileManager::PairedPictureLayer::~PairedPictureLayer() {}
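
// RasterTileIterator merges the per-layer LayerRasterTileIterators using a
// heap of PairedPictureLayerIterator pointers ordered by
// RasterOrderComparator: operator*() peeks the front pair's next tile, and
// operator++() pops that pair, advances it, and pushes it back if it still
// has tiles. Tiles shared between an active/pending layer pair are
// deduplicated via returned_shared_tiles.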
TileManager::RasterTileIterator::RasterTileIterator(TileManager* tile_manager,
                                                    TreePriority tree_priority)
    : tree_priority_(tree_priority), comparator_(tree_priority) {
  std::vector<TileManager::PairedPictureLayer> paired_layers;
  tile_manager->GetPairedPictureLayers(&paired_layers);
  bool prioritize_low_res = tree_priority_ == SMOOTHNESS_TAKES_PRIORITY;

  paired_iterators_.reserve(paired_layers.size());
  iterator_heap_.reserve(paired_layers.size());
  for (std::vector<TileManager::PairedPictureLayer>::iterator it =
           paired_layers.begin();
       it != paired_layers.end();
       ++it) {
    PairedPictureLayerIterator paired_iterator;
    if (it->active_layer) {
      paired_iterator.active_iterator =
          PictureLayerImpl::LayerRasterTileIterator(it->active_layer,
                                                    prioritize_low_res);
    }

    if (it->pending_layer) {
      paired_iterator.pending_iterator =
          PictureLayerImpl::LayerRasterTileIterator(it->pending_layer,
                                                    prioritize_low_res);
    }

    if (paired_iterator.PeekTile(tree_priority_) != NULL) {
      paired_iterators_.push_back(paired_iterator);
      iterator_heap_.push_back(&paired_iterators_.back());
    }
  }

  std::make_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
}

TileManager::RasterTileIterator::~RasterTileIterator() {}
TileManager::RasterTileIterator&
TileManager::RasterTileIterator::operator++() {
  std::pop_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
  PairedPictureLayerIterator* paired_iterator = iterator_heap_.back();
  iterator_heap_.pop_back();

  paired_iterator->PopTile(tree_priority_);
  if (paired_iterator->PeekTile(tree_priority_) != NULL) {
    iterator_heap_.push_back(paired_iterator);
    std::push_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
  }
  return *this;
}

TileManager::RasterTileIterator::operator bool() const {
  return !iterator_heap_.empty();
}

Tile* TileManager::RasterTileIterator::operator*() {
  return iterator_heap_.front()->PeekTile(tree_priority_);
}
TileManager::RasterTileIterator::PairedPictureLayerIterator::
    PairedPictureLayerIterator() {}

TileManager::RasterTileIterator::PairedPictureLayerIterator::
    ~PairedPictureLayerIterator() {}

Tile* TileManager::RasterTileIterator::PairedPictureLayerIterator::PeekTile(
    TreePriority tree_priority) {
  PictureLayerImpl::LayerRasterTileIterator* next_iterator =
      NextTileIterator(tree_priority).first;
  if (!next_iterator)
    return NULL;

  DCHECK(*next_iterator);
  DCHECK(std::find(returned_shared_tiles.begin(),
                   returned_shared_tiles.end(),
                   **next_iterator) == returned_shared_tiles.end());
  return **next_iterator;
}
void TileManager::RasterTileIterator::PairedPictureLayerIterator::PopTile(
    TreePriority tree_priority) {
  PictureLayerImpl::LayerRasterTileIterator* next_iterator =
      NextTileIterator(tree_priority).first;
  DCHECK(next_iterator);
  DCHECK(*next_iterator);
  returned_shared_tiles.push_back(**next_iterator);
  ++(*next_iterator);

  next_iterator = NextTileIterator(tree_priority).first;
  while (next_iterator &&
         std::find(returned_shared_tiles.begin(),
                   returned_shared_tiles.end(),
                   **next_iterator) != returned_shared_tiles.end()) {
    ++(*next_iterator);
    next_iterator = NextTileIterator(tree_priority).first;
  }
}
std::pair<PictureLayerImpl::LayerRasterTileIterator*, WhichTree>
TileManager::RasterTileIterator::PairedPictureLayerIterator::NextTileIterator(
    TreePriority tree_priority) {
  // If both iterators are out of tiles, return NULL.
  if (!active_iterator && !pending_iterator) {
    return std::pair<PictureLayerImpl::LayerRasterTileIterator*, WhichTree>(
        NULL, ACTIVE_TREE);
  }

  // If we only have one iterator with tiles, return it.
  if (!active_iterator)
    return std::make_pair(&pending_iterator, PENDING_TREE);
  if (!pending_iterator)
    return std::make_pair(&active_iterator, ACTIVE_TREE);

  // Now both iterators have tiles, so we have to decide based on tree priority.
  switch (tree_priority) {
    case SMOOTHNESS_TAKES_PRIORITY:
      return std::make_pair(&active_iterator, ACTIVE_TREE);
    case NEW_CONTENT_TAKES_PRIORITY:
      return std::make_pair(&pending_iterator, ACTIVE_TREE);
    case SAME_PRIORITY_FOR_BOTH_TREES: {
      Tile* active_tile = *active_iterator;
      Tile* pending_tile = *pending_iterator;
      if (active_tile == pending_tile)
        return std::make_pair(&active_iterator, ACTIVE_TREE);

      const TilePriority& active_priority = active_tile->priority(ACTIVE_TREE);
      const TilePriority& pending_priority =
          pending_tile->priority(PENDING_TREE);

      if (active_priority.IsHigherPriorityThan(pending_priority))
        return std::make_pair(&active_iterator, ACTIVE_TREE);
      return std::make_pair(&pending_iterator, PENDING_TREE);
    }
  }

  // Keep the compiler happy.
  return std::pair<PictureLayerImpl::LayerRasterTileIterator*, WhichTree>(
      NULL, ACTIVE_TREE);
}
TileManager::RasterTileIterator::RasterOrderComparator::RasterOrderComparator(
    TreePriority tree_priority)
    : tree_priority_(tree_priority) {}

bool TileManager::RasterTileIterator::RasterOrderComparator::operator()(
    PairedPictureLayerIterator* a,
    PairedPictureLayerIterator* b) const {
  std::pair<PictureLayerImpl::LayerRasterTileIterator*, WhichTree> a_pair =
      a->NextTileIterator(tree_priority_);
  DCHECK(a_pair.first);
  DCHECK(*a_pair.first);

  std::pair<PictureLayerImpl::LayerRasterTileIterator*, WhichTree> b_pair =
      b->NextTileIterator(tree_priority_);
  DCHECK(b_pair.first);
  DCHECK(*b_pair.first);

  Tile* a_tile = **a_pair.first;
  Tile* b_tile = **b_pair.first;

  const TilePriority& a_priority =
      a_tile->priority_for_tree_priority(tree_priority_);
  const TilePriority& b_priority =
      b_tile->priority_for_tree_priority(tree_priority_);
  bool prioritize_low_res = tree_priority_ == SMOOTHNESS_TAKES_PRIORITY;

  if (b_priority.resolution != a_priority.resolution) {
    return (prioritize_low_res && b_priority.resolution == LOW_RESOLUTION) ||
           (!prioritize_low_res && b_priority.resolution == HIGH_RESOLUTION) ||
           (a_priority.resolution == NON_IDEAL_RESOLUTION);
  }

  return b_priority.IsHigherPriorityThan(a_priority);
}
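
// Note on RasterOrderComparator above: std::make_heap and friends build a
// max-heap, so operator() returning true means |b| should be visited before
// |a|. When resolutions differ, the preferred resolution wins (low-res when
// smoothness takes priority, high-res otherwise) and non-ideal resolution
// always loses; otherwise the tie is broken by
// TilePriority::IsHigherPriorityThan().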
TileManager::EvictionTileIterator::EvictionTileIterator()
    : comparator_(SAME_PRIORITY_FOR_BOTH_TREES) {}

TileManager::EvictionTileIterator::EvictionTileIterator(
    TileManager* tile_manager,
    TreePriority tree_priority)
    : tree_priority_(tree_priority), comparator_(tree_priority) {
  std::vector<TileManager::PairedPictureLayer> paired_layers;

  tile_manager->GetPairedPictureLayers(&paired_layers);

  paired_iterators_.reserve(paired_layers.size());
  iterator_heap_.reserve(paired_layers.size());
  for (std::vector<TileManager::PairedPictureLayer>::iterator it =
           paired_layers.begin();
       it != paired_layers.end();
       ++it) {
    PairedPictureLayerIterator paired_iterator;
    if (it->active_layer) {
      paired_iterator.active_iterator =
          PictureLayerImpl::LayerEvictionTileIterator(it->active_layer,
                                                      tree_priority_);
    }

    if (it->pending_layer) {
      paired_iterator.pending_iterator =
          PictureLayerImpl::LayerEvictionTileIterator(it->pending_layer,
                                                      tree_priority_);
    }

    if (paired_iterator.PeekTile(tree_priority_) != NULL) {
      paired_iterators_.push_back(paired_iterator);
      iterator_heap_.push_back(&paired_iterators_.back());
    }
  }

  std::make_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
}

TileManager::EvictionTileIterator::~EvictionTileIterator() {}
TileManager::EvictionTileIterator& TileManager::EvictionTileIterator::
operator++() {
  std::pop_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
  PairedPictureLayerIterator* paired_iterator = iterator_heap_.back();
  iterator_heap_.pop_back();

  paired_iterator->PopTile(tree_priority_);
  if (paired_iterator->PeekTile(tree_priority_) != NULL) {
    iterator_heap_.push_back(paired_iterator);
    std::push_heap(iterator_heap_.begin(), iterator_heap_.end(), comparator_);
  }
  return *this;
}

TileManager::EvictionTileIterator::operator bool() const {
  return !iterator_heap_.empty();
}

Tile* TileManager::EvictionTileIterator::operator*() {
  return iterator_heap_.front()->PeekTile(tree_priority_);
}
TileManager::EvictionTileIterator::PairedPictureLayerIterator::
    PairedPictureLayerIterator() {}

TileManager::EvictionTileIterator::PairedPictureLayerIterator::
    ~PairedPictureLayerIterator() {}

Tile* TileManager::EvictionTileIterator::PairedPictureLayerIterator::PeekTile(
    TreePriority tree_priority) {
  PictureLayerImpl::LayerEvictionTileIterator* next_iterator =
      NextTileIterator(tree_priority);
  if (!next_iterator)
    return NULL;

  DCHECK(*next_iterator);
  DCHECK(std::find(returned_shared_tiles.begin(),
                   returned_shared_tiles.end(),
                   **next_iterator) == returned_shared_tiles.end());
  return **next_iterator;
}
void TileManager::EvictionTileIterator::PairedPictureLayerIterator::PopTile(
    TreePriority tree_priority) {
  PictureLayerImpl::LayerEvictionTileIterator* next_iterator =
      NextTileIterator(tree_priority);
  DCHECK(next_iterator);
  DCHECK(*next_iterator);
  returned_shared_tiles.push_back(**next_iterator);
  ++(*next_iterator);

  next_iterator = NextTileIterator(tree_priority);
  while (next_iterator &&
         std::find(returned_shared_tiles.begin(),
                   returned_shared_tiles.end(),
                   **next_iterator) != returned_shared_tiles.end()) {
    ++(*next_iterator);
    next_iterator = NextTileIterator(tree_priority);
  }
}
PictureLayerImpl::LayerEvictionTileIterator*
TileManager::EvictionTileIterator::PairedPictureLayerIterator::NextTileIterator(
    TreePriority tree_priority) {
  // If both iterators are out of tiles, return NULL.
  if (!active_iterator && !pending_iterator)
    return NULL;

  // If we only have one iterator with tiles, return it.
  if (!active_iterator)
    return &pending_iterator;
  if (!pending_iterator)
    return &active_iterator;

  Tile* active_tile = *active_iterator;
  Tile* pending_tile = *pending_iterator;
  if (active_tile == pending_tile)
    return &active_iterator;

  const TilePriority& active_priority =
      active_tile->priority_for_tree_priority(tree_priority);
  const TilePriority& pending_priority =
      pending_tile->priority_for_tree_priority(tree_priority);

  if (pending_priority.IsHigherPriorityThan(active_priority))
    return &active_iterator;
  return &pending_iterator;
}
TileManager::EvictionTileIterator::EvictionOrderComparator::
    EvictionOrderComparator(TreePriority tree_priority)
    : tree_priority_(tree_priority) {}

bool TileManager::EvictionTileIterator::EvictionOrderComparator::operator()(
    PairedPictureLayerIterator* a,
    PairedPictureLayerIterator* b) const {
  PictureLayerImpl::LayerEvictionTileIterator* a_iterator =
      a->NextTileIterator(tree_priority_);
  DCHECK(*a_iterator);

  PictureLayerImpl::LayerEvictionTileIterator* b_iterator =
      b->NextTileIterator(tree_priority_);
  DCHECK(*b_iterator);

  Tile* a_tile = **a_iterator;
  Tile* b_tile = **b_iterator;

  const TilePriority& a_priority =
      a_tile->priority_for_tree_priority(tree_priority_);
  const TilePriority& b_priority =
      b_tile->priority_for_tree_priority(tree_priority_);
  bool prioritize_low_res = tree_priority_ != SMOOTHNESS_TAKES_PRIORITY;

  if (b_priority.resolution != a_priority.resolution) {
    return (prioritize_low_res && b_priority.resolution == LOW_RESOLUTION) ||
           (!prioritize_low_res && b_priority.resolution == HIGH_RESOLUTION) ||
           (a_priority.resolution == NON_IDEAL_RESOLUTION);
  }
  return a_priority.IsHigherPriorityThan(b_priority);
}