1 // Copyright 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "cc/resources/tile_manager.h"
11 #include "base/bind.h"
12 #include "base/json/json_writer.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "cc/debug/devtools_instrumentation.h"
16 #include "cc/debug/traced_value.h"
17 #include "cc/layers/picture_layer_impl.h"
18 #include "cc/resources/direct_raster_worker_pool.h"
19 #include "cc/resources/image_raster_worker_pool.h"
20 #include "cc/resources/pixel_buffer_raster_worker_pool.h"
21 #include "cc/resources/raster_worker_pool_delegate.h"
22 #include "cc/resources/tile.h"
23 #include "skia/ext/paint_simplifier.h"
24 #include "third_party/skia/include/core/SkBitmap.h"
25 #include "third_party/skia/include/core/SkPixelRef.h"
26 #include "ui/gfx/rect_conversions.h"
31 // Flag to indicate whether we should try and detect that
32 // a tile is of solid color.
// When false, Analyze() below discards the solid-color result via
// "analysis_.is_solid_color &= kUseColorEstimator".
const bool kUseColorEstimator
= true;
// Skia draw filter that disables LCD (subpixel) text rendering for text
// draws; installed on the raster canvas for the
// HIGH_QUALITY_NO_LCD_RASTER_MODE branch in RasterWorkerPoolTaskImpl::Raster().
// NOTE(review): this chunk appears to have lines dropped by extraction
// (no visible "return" statements or closing braces after the condition) --
// confirm against the upstream file before editing.
class DisableLCDTextFilter
: public SkDrawFilter
{
37 // SkDrawFilter interface.
virtual bool filter(SkPaint
* paint
, SkDrawFilter::Type type
) OVERRIDE
{
if (type
!= SkDrawFilter::kText_Type
)
paint
->setLCDRenderText(false);
// Worker-pool task that (optionally) analyzes a picture pile for solid
// color and then rasters it into an acquired canvas. Results are reported
// back on the origin thread through |reply_|.
// NOTE(review): many source lines in this chunk appear to have been dropped
// by extraction (parameters such as contents_scale/analyze_picture, access
// specifiers, braces, and switch "break;" lines) -- confirm against the
// upstream file before editing.
class RasterWorkerPoolTaskImpl
: public internal::RasterWorkerPoolTask
{
RasterWorkerPoolTaskImpl(
50 const Resource
* resource
,
51 PicturePileImpl
* picture_pile
,
52 const gfx::Rect
& content_rect
,
54 RasterMode raster_mode
,
55 TileResolution tile_resolution
,
58 int source_frame_number
,
60 RenderingStatsInstrumentation
* rendering_stats
,
61 const base::Callback
<void(const PicturePileImpl::Analysis
&, bool)>& reply
,
62 internal::WorkerPoolTask::Vector
* dependencies
)
// NOTE(review): contents_scale_ and analyze_picture_ are initialized from
// parameters not present in the visible parameter list above -- those
// parameter lines were presumably dropped.
: internal::RasterWorkerPoolTask(resource
, dependencies
),
picture_pile_(picture_pile
),
content_rect_(content_rect
),
contents_scale_(contents_scale
),
raster_mode_(raster_mode
),
tile_resolution_(tile_resolution
),
source_frame_number_(source_frame_number
),
analyze_picture_(analyze_picture
),
rendering_stats_(rendering_stats
),
77 // Overridden from internal::Task:
// Runs on a worker thread: analyze/raster a thread-local clone of the pile.
virtual void RunOnWorkerThread() OVERRIDE
{
TRACE_EVENT0("cc", "RasterWorkerPoolTaskImpl::RunOnWorkerThread");
DCHECK(picture_pile_
);
AnalyzeAndRaster(picture_pile_
->GetCloneForDrawingOnThread(
84 RasterWorkerPool::GetPictureCloneIndexForCurrentThread()));
88 // Overridden from internal::WorkerPoolTask:
virtual void ScheduleOnOriginThread(internal::WorkerPoolTaskClient
* client
)
canvas_
= client
->AcquireCanvasForRaster(this, resource());
virtual void RunOnOriginThread() OVERRIDE
{
TRACE_EVENT0("cc", "RasterWorkerPoolTaskImpl::RunOnOriginThread");
AnalyzeAndRaster(picture_pile_
);
virtual void CompleteOnOriginThread(internal::WorkerPoolTaskClient
* client
)
client
->ReleaseCanvasForRaster(this, resource());
// Delivers analysis + completion state to the owner via |reply_|.
virtual void RunReplyOnOriginThread() OVERRIDE
{
reply_
.Run(analysis_
, !HasFinishedRunning());
// Canvas must have been released via CompleteOnOriginThread() by now.
virtual ~RasterWorkerPoolTaskImpl() { DCHECK(!canvas_
); }
// Builds a dictionary describing this task for tracing.
scoped_ptr
<base::Value
> DataAsValue() const {
scoped_ptr
<base::DictionaryValue
> res(new base::DictionaryValue());
res
->Set("tile_id", TracedValue::CreateIDRef(tile_id_
).release());
res
->Set("resolution", TileResolutionAsValue(tile_resolution_
).release());
res
->SetInteger("source_frame_number", source_frame_number_
);
res
->SetInteger("layer_id", layer_id_
);
return res
.PassAs
<base::Value
>();
// Optionally analyzes the pile for solid color, then rasters it.
void AnalyzeAndRaster(PicturePileImpl
* picture_pile
) {
DCHECK(picture_pile
);
if (analyze_picture_
) {
Analyze(picture_pile
);
// Solid-color tiles do not need to be rastered at all.
if (analysis_
.is_solid_color
)
Raster(picture_pile
);
// Runs the solid-color analysis over |content_rect_| and records UMA.
void Analyze(PicturePileImpl
* picture_pile
) {
137 "RasterWorkerPoolTaskImpl::Analyze",
139 TracedValue::FromValue(DataAsValue().release()));
DCHECK(picture_pile
);
picture_pile
->AnalyzeInRect(
content_rect_
, contents_scale_
, &analysis_
, rendering_stats_
);
146 // Record the solid color prediction.
UMA_HISTOGRAM_BOOLEAN("Renderer4.SolidColorTilesAnalyzed",
analysis_
.is_solid_color
);
150 // Clear the flag if we're not using the estimator.
analysis_
.is_solid_color
&= kUseColorEstimator
;
// Rasters the pile into |canvas_|, applying the mode-specific draw filter.
void Raster(PicturePileImpl
* picture_pile
) {
157 "RasterWorkerPoolTaskImpl::Raster",
159 TracedValue::FromValue(DataAsValue().release()),
161 TracedValue::FromValue(RasterModeAsValue(raster_mode_
).release()));
devtools_instrumentation::ScopedLayerTask
raster_task(
devtools_instrumentation::kRasterTask
, layer_id_
);
skia::RefPtr
<SkDrawFilter
> draw_filter
;
switch (raster_mode_
) {
case LOW_QUALITY_RASTER_MODE
:
draw_filter
= skia::AdoptRef(new skia::PaintSimplifier
);
case HIGH_QUALITY_NO_LCD_RASTER_MODE
:
draw_filter
= skia::AdoptRef(new DisableLCDTextFilter
);
case HIGH_QUALITY_RASTER_MODE
:
case NUM_RASTER_MODES
:
canvas_
->setDrawFilter(draw_filter
.get());
base::TimeDelta prev_rasterize_time
=
rendering_stats_
->impl_thread_rendering_stats().rasterize_time
;
185 // Only record rasterization time for highres tiles, because
186 // lowres tiles are not required for activation and therefore
187 // introduce noise in the measurement (sometimes they get rasterized
188 // before we draw and sometimes they aren't)
RenderingStatsInstrumentation
* stats
=
tile_resolution_
== HIGH_RESOLUTION
? rendering_stats_
: NULL
;
DCHECK(picture_pile
);
picture_pile
->RasterToBitmap(
canvas_
, content_rect_
, contents_scale_
, stats
);
if (rendering_stats_
->record_rendering_stats()) {
base::TimeDelta current_rasterize_time
=
rendering_stats_
->impl_thread_rendering_stats().rasterize_time
;
HISTOGRAM_CUSTOM_COUNTS(
199 "Renderer4.PictureRasterTimeUS",
(current_rasterize_time
- prev_rasterize_time
).InMicroseconds(),
// --- Member data ---
PicturePileImpl::Analysis analysis_
;
scoped_refptr
<PicturePileImpl
> picture_pile_
;
gfx::Rect content_rect_
;
float contents_scale_
;
RasterMode raster_mode_
;
TileResolution tile_resolution_
;
const void* tile_id_
;
int source_frame_number_
;
bool analyze_picture_
;
RenderingStatsInstrumentation
* rendering_stats_
;
const base::Callback
<void(const PicturePileImpl::Analysis
&, bool)> reply_
;
DISALLOW_COPY_AND_ASSIGN(RasterWorkerPoolTaskImpl
);
// Worker-pool task that forces decode of a single image (SkPixelRef) by
// locking and unlocking its pixels; the reply callback reports whether the
// task was canceled.
// NOTE(review): lines appear dropped by extraction (access specifiers,
// braces, the body that calls the decode helper) -- confirm upstream.
class ImageDecodeWorkerPoolTaskImpl
: public internal::WorkerPoolTask
{
ImageDecodeWorkerPoolTaskImpl(
227 SkPixelRef
* pixel_ref
,
229 RenderingStatsInstrumentation
* rendering_stats
,
230 const base::Callback
<void(bool was_canceled
)>& reply
)
: pixel_ref_(skia::SharePtr(pixel_ref
)),
rendering_stats_(rendering_stats
),
236 // Overridden from internal::Task:
virtual void RunOnWorkerThread() OVERRIDE
{
TRACE_EVENT0("cc", "ImageDecodeWorkerPoolTaskImpl::RunOnWorkerThread");
242 // Overridden from internal::WorkerPoolTask:
virtual void ScheduleOnOriginThread(internal::WorkerPoolTaskClient
* client
)
virtual void RunOnOriginThread() OVERRIDE
{
TRACE_EVENT0("cc", "ImageDecodeWorkerPoolTaskImpl::RunOnOriginThread");
virtual void CompleteOnOriginThread(internal::WorkerPoolTaskClient
* client
)
// Reports cancellation state back to the owner.
virtual void RunReplyOnOriginThread() OVERRIDE
{
reply_
.Run(!HasFinishedRunning());
virtual ~ImageDecodeWorkerPoolTaskImpl() {}
devtools_instrumentation::ScopedImageDecodeTask
image_decode_task(
262 // This will cause the image referred to by pixel ref to be decoded.
pixel_ref_
->lockPixels();
pixel_ref_
->unlockPixels();
// --- Member data ---
skia::RefPtr
<SkPixelRef
> pixel_ref_
;
RenderingStatsInstrumentation
* rendering_stats_
;
const base::Callback
<void(bool was_canceled
)> reply_
;
DISALLOW_COPY_AND_ASSIGN(ImageDecodeWorkerPoolTaskImpl
);
// Upper bound on the number of raster tasks scheduled at once.
const size_t kScheduledRasterTasksLimit
= 32u;
277 // Memory limit policy works by mapping some bin states to the NEVER bin.
// Indexed as kBinPolicyMap[memory_policy][bin]; see GetTilesWithAssignedBins().
// NOTE(review): the row-closing "}," / "};" lines and the label comment for
// the first row (presumably [ALLOW_NOTHING]) appear to have been dropped by
// extraction -- confirm upstream.
const ManagedTileBin kBinPolicyMap
[NUM_TILE_MEMORY_LIMIT_POLICIES
][NUM_BINS
] = {
{NEVER_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NEVER_BIN
, // [NOW_BIN]
NEVER_BIN
, // [SOON_BIN]
NEVER_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
NEVER_BIN
, // [EVENTUALLY_BIN]
NEVER_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
NEVER_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
289 // [ALLOW_ABSOLUTE_MINIMUM]
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
NEVER_BIN
, // [SOON_BIN]
NEVER_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
NEVER_BIN
, // [EVENTUALLY_BIN]
NEVER_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
NEVER_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
299 // [ALLOW_PREPAINT_ONLY]
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
NEVER_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
NEVER_BIN
, // [EVENTUALLY_BIN]
NEVER_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
NEVER_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
EVENTUALLY_BIN
, // [EVENTUALLY_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
AT_LAST_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
320 // Ready to draw works by mapping NOW_BIN to NOW_AND_READY_TO_DRAW_BIN.
// Indexed as kBinReadyToDrawMap[tile_is_ready_to_draw][bin].
const ManagedTileBin kBinReadyToDrawMap
[2][NUM_BINS
] = {
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
EVENTUALLY_BIN
, // [EVENTUALLY_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
AT_LAST_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_AND_READY_TO_DRAW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
EVENTUALLY_BIN
, // [EVENTUALLY_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
AT_LAST_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
343 // Active works by mapping some bin stats to equivalent _ACTIVE_BIN state.
// Indexed as kBinIsActiveMap[tile_is_active][bin].
const ManagedTileBin kBinIsActiveMap
[2][NUM_BINS
] = {
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
EVENTUALLY_BIN
, // [EVENTUALLY_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
AT_LAST_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
{NOW_AND_READY_TO_DRAW_BIN
, // [NOW_AND_READY_TO_DRAW_BIN]
NOW_BIN
, // [NOW_BIN]
SOON_BIN
, // [SOON_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_AND_ACTIVE_BIN]
EVENTUALLY_AND_ACTIVE_BIN
, // [EVENTUALLY_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_AND_ACTIVE_BIN]
AT_LAST_AND_ACTIVE_BIN
, // [AT_LAST_BIN]
NEVER_BIN
// [NEVER_BIN]
366 // Determine bin based on three categories of tiles: things we need now,
367 // things we need soon, and eventually.
// NOTE(review): the return statements forming the bodies of the three "if"
// conditions below (presumably returning NOW_BIN / SOON_BIN / NEVER_BIN)
// appear to have been dropped by extraction -- confirm upstream.
inline ManagedTileBin
BinFromTilePriority(const TilePriority
& prio
) {
// Guard distance: tiles within this range of visible are treated as SOON.
const float kBackflingGuardDistancePixels
= 314.0f
;
if (prio
.priority_bin
== TilePriority::NOW
)
if (prio
.priority_bin
== TilePriority::SOON
||
prio
.distance_to_visible
< kBackflingGuardDistancePixels
)
// Infinite distance means the tile can never become visible.
if (prio
.distance_to_visible
== std::numeric_limits
<float>::infinity())
return EVENTUALLY_BIN
;
386 RasterTaskCompletionStats::RasterTaskCompletionStats()
387 : completed_count(0u), canceled_count(0u) {}
// Serializes raster-task completion stats into a dictionary for tracing.
// NOTE(review): the closing brace of this function appears to have been
// dropped by extraction.
scoped_ptr
<base::Value
> RasterTaskCompletionStatsAsValue(
const RasterTaskCompletionStats
& stats
) {
scoped_ptr
<base::DictionaryValue
> state(new base::DictionaryValue());
state
->SetInteger("completed_count", stats
.completed_count
);
state
->SetInteger("canceled_count", stats
.canceled_count
);
return state
.PassAs
<base::Value
>();
// Factory: builds a TileManager with either an image raster worker pool or
// a pixel-buffer raster worker pool, plus a direct pool for on-demand work.
// NOTE(review): the condition selecting between the two pools (before "?")
// and the leading TileManager constructor arguments appear to have been
// dropped by extraction -- confirm upstream.
scoped_ptr
<TileManager
> TileManager::Create(
399 TileManagerClient
* client
,
400 base::SequencedTaskRunner
* task_runner
,
401 ResourceProvider
* resource_provider
,
402 ContextProvider
* context_provider
,
403 RenderingStatsInstrumentation
* rendering_stats_instrumentation
,
405 bool use_rasterize_on_demand
,
406 size_t max_transfer_buffer_usage_bytes
,
407 size_t max_raster_usage_bytes
,
408 unsigned map_image_texture_target
) {
return make_scoped_ptr(new TileManager(
? ImageRasterWorkerPool::Create(
task_runner
, resource_provider
, map_image_texture_target
)
: PixelBufferRasterWorkerPool::Create(
max_transfer_buffer_usage_bytes
),
DirectRasterWorkerPool::Create(
task_runner
, resource_provider
, context_provider
),
max_raster_usage_bytes
,
rendering_stats_instrumentation
,
use_rasterize_on_demand
));
// Constructor: takes ownership of the two raster worker pools, builds the
// resource pool from the default pool's target/format, and registers both
// pools with a RasterWorkerPoolDelegate.
// NOTE(review): initializer-list entries (e.g. client_, task_runner_,
// resource_pool_ opening) and the array-closing brace appear to have been
// dropped by extraction -- confirm upstream.
TileManager::TileManager(
429 TileManagerClient
* client
,
430 base::SequencedTaskRunner
* task_runner
,
431 ResourceProvider
* resource_provider
,
432 ContextProvider
* context_provider
,
433 scoped_ptr
<RasterWorkerPool
> raster_worker_pool
,
434 scoped_ptr
<RasterWorkerPool
> direct_raster_worker_pool
,
435 size_t max_raster_usage_bytes
,
436 RenderingStatsInstrumentation
* rendering_stats_instrumentation
,
437 bool use_rasterize_on_demand
)
context_provider_(context_provider
),
ResourcePool::Create(resource_provider
,
raster_worker_pool
->GetResourceTarget(),
raster_worker_pool
->GetResourceFormat())),
raster_worker_pool_(raster_worker_pool
.Pass()),
direct_raster_worker_pool_(direct_raster_worker_pool
.Pass()),
446 prioritized_tiles_dirty_(false),
447 all_tiles_that_need_to_be_rasterized_have_memory_(true),
448 all_tiles_required_for_activation_have_memory_(true),
449 memory_required_bytes_(0),
450 memory_nice_to_have_bytes_(0),
451 bytes_releasable_(0),
452 resources_releasable_(0),
max_raster_usage_bytes_(max_raster_usage_bytes
),
454 ever_exceeded_memory_budget_(false),
rendering_stats_instrumentation_(rendering_stats_instrumentation
),
456 did_initialize_visible_tile_(false),
457 did_check_for_completed_tasks_since_last_schedule_tasks_(true),
use_rasterize_on_demand_(use_rasterize_on_demand
) {
RasterWorkerPool
* raster_worker_pools
[NUM_RASTER_WORKER_POOL_TYPES
] = {
raster_worker_pool_
.get(), // RASTER_WORKER_POOL_TYPE_DEFAULT
direct_raster_worker_pool_
.get() // RASTER_WORKER_POOL_TYPE_DIRECT
raster_worker_pool_delegate_
= RasterWorkerPoolDelegate::Create(
this, raster_worker_pools
, arraysize(raster_worker_pools
));
// Destructor: resets global state, drops released tiles, cancels all
// scheduled work, and shuts down the worker-pool delegate. By the end, all
// releasable memory must already be gone (DCHECKed below).
TileManager::~TileManager() {
468 // Reset global state and manage. This should cause
469 // our memory usage to drop to zero.
global_state_
= GlobalStateThatImpactsTilePriority();
472 CleanUpReleasedTiles();
DCHECK_EQ(0u, tiles_
.size());
// Schedule empty queues to cancel everything outstanding.
RasterTaskQueue empty
[NUM_RASTER_WORKER_POOL_TYPES
];
raster_worker_pool_delegate_
->ScheduleTasks(empty
);
orphan_raster_tasks_
.clear();
479 // This should finish all pending tasks and release any uninitialized
raster_worker_pool_delegate_
->Shutdown();
raster_worker_pool_delegate_
->CheckForCompletedTasks();
DCHECK_EQ(0u, bytes_releasable_
);
DCHECK_EQ(0u, resources_releasable_
);
// Tell registered layers this manager is going away.
for (std::vector
<PictureLayerImpl
*>::iterator it
= layers_
.begin();
(*it
)->DidUnregisterLayer();
// Queues |tile| for deletion; actual cleanup happens in
// CleanUpReleasedTiles(). Marks the prioritized set dirty.
void TileManager::Release(Tile
* tile
) {
prioritized_tiles_dirty_
= true;
released_tiles_
.push_back(tile
);
// A tile's priority changed: invalidate the cached prioritized tile set so
// it gets rebuilt on the next UpdatePrioritizedTileSetIfNeeded().
void TileManager::DidChangeTilePriority(Tile
* tile
) {
prioritized_tiles_dirty_
= true;
// Activation-required tasks are forced to complete except while smoothness
// (e.g. an accelerated gesture) takes priority.
bool TileManager::ShouldForceTasksRequiredForActivationToComplete() const {
return global_state_
.tree_priority
!= SMOOTHNESS_TAKES_PRIORITY
;
// Frees resources for every tile queued by Release(), orphans their raster
// tasks, removes them from the tile map, and drops per-layer bookkeeping
// (including cached image decode tasks once a layer's count hits zero).
// NOTE(review): lines appear dropped by extraction (the loop increment,
// the "Tile* tile = *it;" line, closing braces, and the delete of the
// tile) -- confirm upstream.
void TileManager::CleanUpReleasedTiles() {
for (std::vector
<Tile
*>::iterator it
= released_tiles_
.begin();
it
!= released_tiles_
.end();
ManagedTileState
& mts
= tile
->managed_state();
// Free every raster mode's resource and orphan any in-flight task.
for (int mode
= 0; mode
< NUM_RASTER_MODES
; ++mode
) {
FreeResourceForTile(tile
, static_cast<RasterMode
>(mode
));
orphan_raster_tasks_
.push_back(mts
.tile_versions
[mode
].raster_task_
);
DCHECK(tiles_
.find(tile
->id()) != tiles_
.end());
tiles_
.erase(tile
->id());
LayerCountMap::iterator layer_it
=
used_layer_counts_
.find(tile
->layer_id());
DCHECK_GT(layer_it
->second
, 0);
// Last tile for this layer: drop layer bookkeeping and decode tasks.
if (--layer_it
->second
== 0) {
used_layer_counts_
.erase(layer_it
);
image_decode_tasks_
.erase(tile
->layer_id());
released_tiles_
.clear();
// Rebuilds the prioritized tile set, but only when it has been marked dirty
// by Release()/DidChangeTilePriority()/ManageTiles().
// NOTE(review): the early "return;" body of the dirty check appears to have
// been dropped by extraction.
void TileManager::UpdatePrioritizedTileSetIfNeeded() {
if (!prioritized_tiles_dirty_
)
541 CleanUpReleasedTiles();
prioritized_tiles_
.Clear();
GetTilesWithAssignedBins(&prioritized_tiles_
);
prioritized_tiles_dirty_
= false;
// Called when all scheduled tasks have run. When OOM, keeps re-assigning
// memory (and rescheduling) until a steady state is reached, then falls
// back to on-demand raster for required-for-activation tiles before
// notifying the client it is ready to activate.
// NOTE(review): several early-return bodies and closing braces appear to
// have been dropped by extraction -- confirm upstream.
void TileManager::DidFinishRunningTasks() {
549 TRACE_EVENT0("cc", "TileManager::DidFinishRunningTasks");
bool memory_usage_above_limit
= resource_pool_
->total_memory_usage_bytes() >
global_state_
.soft_memory_limit_in_bytes
;
554 // When OOM, keep re-assigning memory until we reach a steady state
555 // where top-priority tiles are initialized.
if (all_tiles_that_need_to_be_rasterized_have_memory_
&&
!memory_usage_above_limit
)
raster_worker_pool_delegate_
->CheckForCompletedTasks();
did_check_for_completed_tasks_since_last_schedule_tasks_
= true;
TileVector tiles_that_need_to_be_rasterized
;
AssignGpuMemoryToTiles(&prioritized_tiles_
,
&tiles_that_need_to_be_rasterized
);
567 // |tiles_that_need_to_be_rasterized| will be empty when we reach a
568 // steady memory state. Keep scheduling tasks until we reach this state.
if (!tiles_that_need_to_be_rasterized
.empty()) {
ScheduleTasks(tiles_that_need_to_be_rasterized
);
resource_pool_
->ReduceResourceUsage();
576 // We don't reserve memory for required-for-activation tiles during
577 // accelerated gestures, so we just postpone activation when we don't
578 // have these tiles, and activate after the accelerated gesture.
bool allow_rasterize_on_demand
=
global_state_
.tree_priority
!= SMOOTHNESS_TAKES_PRIORITY
;
582 // Use on-demand raster for any required-for-activation tiles that have not
583 // been been assigned memory after reaching a steady memory state. This
584 // ensures that we activate even when OOM.
for (TileMap::iterator it
= tiles_
.begin(); it
!= tiles_
.end(); ++it
) {
Tile
* tile
= it
->second
;
ManagedTileState
& mts
= tile
->managed_state();
ManagedTileState::TileVersion
& tile_version
=
mts
.tile_versions
[mts
.raster_mode
];
if (tile
->required_for_activation() && !tile_version
.IsReadyToDraw()) {
592 // If we can't raster on demand, give up early (and don't activate).
if (!allow_rasterize_on_demand
)
if (use_rasterize_on_demand_
)
tile_version
.set_rasterize_on_demand();
client_
->NotifyReadyToActivate();
// NOTE(review): the early "return;" body of the condition appears to have
// been dropped by extraction.
void TileManager::DidFinishRunningTasksRequiredForActivation() {
604 // This is only a true indication that all tiles required for
605 // activation are initialized when no tiles are OOM. We need to
606 // wait for DidFinishRunningTasks() to be called, try to re-assign
607 // memory and in worst case use on-demand raster when tiles
608 // required for activation are OOM.
if (!all_tiles_required_for_activation_have_memory_
)
client_
->NotifyReadyToActivate();
// Walks every tile, computes its active/pending bins (via
// BinFromTilePriority plus the ready-to-draw / is-active / memory-policy
// lookup tables), accumulates the memory stats reported by
// GetMemoryStats(), and inserts each tile into |tiles| by its final bin.
// NOTE(review): several lines appear dropped by extraction (switch
// "break;"s, a "default:" case, and multiple closing braces) -- confirm
// upstream before editing.
void TileManager::GetTilesWithAssignedBins(PrioritizedTileSet
* tiles
) {
616 TRACE_EVENT0("cc", "TileManager::GetTilesWithAssignedBins");
618 // Compute new stats to be return by GetMemoryStats().
memory_required_bytes_
= 0;
memory_nice_to_have_bytes_
= 0;
const TileMemoryLimitPolicy memory_policy
= global_state_
.memory_limit_policy
;
const TreePriority tree_priority
= global_state_
.tree_priority
;
625 // For each tree, bin into different categories of tiles.
for (TileMap::const_iterator it
= tiles_
.begin(); it
!= tiles_
.end(); ++it
) {
Tile
* tile
= it
->second
;
ManagedTileState
& mts
= tile
->managed_state();
const ManagedTileState::TileVersion
& tile_version
=
tile
->GetTileVersionForDrawing();
bool tile_is_ready_to_draw
= tile_version
.IsReadyToDraw();
// "Active" here means drawable now or with a raster task in flight.
bool tile_is_active
= tile_is_ready_to_draw
||
mts
.tile_versions
[mts
.raster_mode
].raster_task_
;
636 // Get the active priority and bin.
TilePriority active_priority
= tile
->priority(ACTIVE_TREE
);
ManagedTileBin active_bin
= BinFromTilePriority(active_priority
);
640 // Get the pending priority and bin.
TilePriority pending_priority
= tile
->priority(PENDING_TREE
);
ManagedTileBin pending_bin
= BinFromTilePriority(pending_priority
);
bool pending_is_low_res
= pending_priority
.resolution
== LOW_RESOLUTION
;
bool pending_is_non_ideal
=
pending_priority
.resolution
== NON_IDEAL_RESOLUTION
;
bool active_is_non_ideal
=
active_priority
.resolution
== NON_IDEAL_RESOLUTION
;
650 // Adjust pending bin state for low res tiles. This prevents
651 // pending tree low-res tiles from being initialized before
if (pending_is_low_res
)
pending_bin
= std::max(pending_bin
, EVENTUALLY_BIN
);
656 // Adjust bin state based on if ready to draw.
active_bin
= kBinReadyToDrawMap
[tile_is_ready_to_draw
][active_bin
];
pending_bin
= kBinReadyToDrawMap
[tile_is_ready_to_draw
][pending_bin
];
660 // Adjust bin state based on if active.
active_bin
= kBinIsActiveMap
[tile_is_active
][active_bin
];
pending_bin
= kBinIsActiveMap
[tile_is_active
][pending_bin
];
664 // We never want to paint new non-ideal tiles, as we always have
665 // a high-res tile covering that content (paint that instead).
if (!tile_is_ready_to_draw
&& active_is_non_ideal
)
active_bin
= NEVER_BIN
;
if (!tile_is_ready_to_draw
&& pending_is_non_ideal
)
pending_bin
= NEVER_BIN
;
671 // Compute combined bin.
ManagedTileBin combined_bin
= std::min(active_bin
, pending_bin
);
if (!tile_is_ready_to_draw
|| tile_version
.requires_resource()) {
675 // The bin that the tile would have if the GPU memory manager had
676 // a maximally permissive policy, send to the GPU memory manager
677 // to determine policy.
ManagedTileBin gpu_memmgr_stats_bin
= combined_bin
;
if ((gpu_memmgr_stats_bin
== NOW_BIN
) ||
(gpu_memmgr_stats_bin
== NOW_AND_READY_TO_DRAW_BIN
))
memory_required_bytes_
+= BytesConsumedIfAllocated(tile
);
if (gpu_memmgr_stats_bin
!= NEVER_BIN
)
memory_nice_to_have_bytes_
+= BytesConsumedIfAllocated(tile
);
// Per-tree bins after the memory-limit policy has been applied.
ManagedTileBin tree_bin
[NUM_TREES
];
tree_bin
[ACTIVE_TREE
] = kBinPolicyMap
[memory_policy
][active_bin
];
tree_bin
[PENDING_TREE
] = kBinPolicyMap
[memory_policy
][pending_bin
];
// Choose the final bin/priority according to the tree priority mode.
TilePriority tile_priority
;
switch (tree_priority
) {
case SAME_PRIORITY_FOR_BOTH_TREES
:
mts
.bin
= kBinPolicyMap
[memory_policy
][combined_bin
];
tile_priority
= tile
->combined_priority();
case SMOOTHNESS_TAKES_PRIORITY
:
mts
.bin
= tree_bin
[ACTIVE_TREE
];
tile_priority
= active_priority
;
case NEW_CONTENT_TAKES_PRIORITY
:
mts
.bin
= tree_bin
[PENDING_TREE
];
tile_priority
= pending_priority
;
706 // Bump up the priority if we determined it's NEVER_BIN on one tree,
707 // but is still required on the other tree.
bool is_in_never_bin_on_both_trees
= tree_bin
[ACTIVE_TREE
] == NEVER_BIN
&&
tree_bin
[PENDING_TREE
] == NEVER_BIN
;
if (mts
.bin
== NEVER_BIN
&& !is_in_never_bin_on_both_trees
)
mts
.bin
= tile_is_active
? AT_LAST_AND_ACTIVE_BIN
: AT_LAST_BIN
;
mts
.resolution
= tile_priority
.resolution
;
mts
.priority_bin
= tile_priority
.priority_bin
;
mts
.distance_to_visible
= tile_priority
.distance_to_visible
;
mts
.required_for_activation
= tile_priority
.required_for_activation
;
mts
.visible_and_ready_to_draw
=
tree_bin
[ACTIVE_TREE
] == NOW_AND_READY_TO_DRAW_BIN
;
722 // If the tile is in NEVER_BIN and it does not have an active task, then we
723 // can release the resources early. If it does have the task however, we
724 // should keep it in the prioritized tile set to ensure that AssignGpuMemory
if (mts
.bin
== NEVER_BIN
&&
!mts
.tile_versions
[mts
.raster_mode
].raster_task_
) {
FreeResourcesForTile(tile
);
732 // Insert the tile into a priority set.
tiles
->InsertTile(tile
, mts
.bin
);
// Main entry point called each frame: absorbs new global state, refreshes
// the prioritized tile set, assigns GPU memory, and schedules raster tasks.
void TileManager::ManageTiles(const GlobalStateThatImpactsTilePriority
& state
) {
738 TRACE_EVENT0("cc", "TileManager::ManageTiles");
740 // Update internal state.
if (state
!= global_state_
) {
global_state_
= state
;
prioritized_tiles_dirty_
= true;
744 // Soft limit is used for resource pool such that
745 // memory returns to soft limit after going over.
resource_pool_
->SetResourceUsageLimits(
global_state_
.soft_memory_limit_in_bytes
,
global_state_
.unused_memory_limit_in_bytes
,
global_state_
.num_resources_limit
);
752 // We need to call CheckForCompletedTasks() once in-between each call
753 // to ScheduleTasks() to prevent canceled tasks from being scheduled.
if (!did_check_for_completed_tasks_since_last_schedule_tasks_
) {
raster_worker_pool_delegate_
->CheckForCompletedTasks();
did_check_for_completed_tasks_since_last_schedule_tasks_
= true;
759 UpdatePrioritizedTileSetIfNeeded();
TileVector tiles_that_need_to_be_rasterized
;
AssignGpuMemoryToTiles(&prioritized_tiles_
,
&tiles_that_need_to_be_rasterized
);
765 // Finally, schedule rasterizer tasks.
ScheduleTasks(tiles_that_need_to_be_rasterized
);
TRACE_EVENT_INSTANT1("cc",
TRACE_EVENT_SCOPE_THREAD
,
772 TracedValue::FromValue(BasicStateAsValue().release()));
TRACE_COUNTER_ID1("cc",
775 "unused_memory_bytes",
resource_pool_
->total_memory_usage_bytes() -
resource_pool_
->acquired_memory_usage_bytes());
// Drains completed raster tasks, emits their completion stats to tracing,
// and returns whether a visible tile was initialized since the last call.
bool TileManager::UpdateVisibleTiles() {
782 TRACE_EVENT0("cc", "TileManager::UpdateVisibleTiles");
raster_worker_pool_delegate_
->CheckForCompletedTasks();
did_check_for_completed_tasks_since_last_schedule_tasks_
= true;
TRACE_EVENT_INSTANT1(
789 "DidUpdateVisibleTiles",
TRACE_EVENT_SCOPE_THREAD
,
TracedValue::FromValue(RasterTaskCompletionStatsAsValue(
update_visible_tiles_stats_
).release()));
// Reset stats for the next interval.
update_visible_tiles_stats_
= RasterTaskCompletionStats();
bool did_initialize_visible_tile
= did_initialize_visible_tile_
;
did_initialize_visible_tile_
= false;
return did_initialize_visible_tile
;
// Reports the memory stats computed by GetTilesWithAssignedBins() plus the
// resource pool's current totals, via the four out-params.
void TileManager::GetMemoryStats(size_t* memory_required_bytes
,
size_t* memory_nice_to_have_bytes
,
size_t* memory_allocated_bytes
,
size_t* memory_used_bytes
) const {
*memory_required_bytes
= memory_required_bytes_
;
*memory_nice_to_have_bytes
= memory_nice_to_have_bytes_
;
*memory_allocated_bytes
= resource_pool_
->total_memory_usage_bytes();
*memory_used_bytes
= resource_pool_
->acquired_memory_usage_bytes();
// Serializes high-level manager state (tile count, global state, memory
// requirements) into a dictionary for tracing.
scoped_ptr
<base::Value
> TileManager::BasicStateAsValue() const {
scoped_ptr
<base::DictionaryValue
> state(new base::DictionaryValue());
state
->SetInteger("tile_count", tiles_
.size());
state
->Set("global_state", global_state_
.AsValue().release());
815 state
->Set("memory_requirements", GetMemoryRequirementsAsValue().release());
return state
.PassAs
<base::Value
>();
// Serializes every managed tile into a list for tracing/debugging.
scoped_ptr
<base::Value
> TileManager::AllTilesAsValue() const {
scoped_ptr
<base::ListValue
> state(new base::ListValue());
for (TileMap::const_iterator it
= tiles_
.begin(); it
!= tiles_
.end(); ++it
)
state
->Append(it
->second
->AsValue().release());
return state
.PassAs
<base::Value
>();
// Serializes GetMemoryStats() output into a dictionary for tracing.
// NOTE(review): the final "&memory_used_bytes);" argument of the
// GetMemoryStats() call appears to have been dropped by extraction.
scoped_ptr
<base::Value
> TileManager::GetMemoryRequirementsAsValue() const {
scoped_ptr
<base::DictionaryValue
> requirements(new base::DictionaryValue());
size_t memory_required_bytes
;
size_t memory_nice_to_have_bytes
;
size_t memory_allocated_bytes
;
size_t memory_used_bytes
;
GetMemoryStats(&memory_required_bytes
,
&memory_nice_to_have_bytes
,
&memory_allocated_bytes
,
requirements
->SetInteger("memory_required_bytes", memory_required_bytes
);
requirements
->SetInteger("memory_nice_to_have_bytes",
memory_nice_to_have_bytes
);
requirements
->SetInteger("memory_allocated_bytes", memory_allocated_bytes
);
requirements
->SetInteger("memory_used_bytes", memory_used_bytes
);
return requirements
.PassAs
<base::Value
>();
846 void TileManager::AssignGpuMemoryToTiles(
847 PrioritizedTileSet
* tiles
,
848 TileVector
* tiles_that_need_to_be_rasterized
) {
849 TRACE_EVENT0("cc", "TileManager::AssignGpuMemoryToTiles");
851 // Maintain the list of released resources that can potentially be re-used
853 // If this operation becomes expensive too, only do this after some
854 // resource(s) was returned. Note that in that case, one also need to
855 // invalidate when releasing some resource from the pool.
856 resource_pool_
->CheckBusyResources();
858 // Now give memory out to the tiles until we're out, and build
859 // the needs-to-be-rasterized queue.
860 all_tiles_that_need_to_be_rasterized_have_memory_
= true;
861 all_tiles_required_for_activation_have_memory_
= true;
863 // Cast to prevent overflow.
864 int64 soft_bytes_available
=
865 static_cast<int64
>(bytes_releasable_
) +
866 static_cast<int64
>(global_state_
.soft_memory_limit_in_bytes
) -
867 static_cast<int64
>(resource_pool_
->acquired_memory_usage_bytes());
868 int64 hard_bytes_available
=
869 static_cast<int64
>(bytes_releasable_
) +
870 static_cast<int64
>(global_state_
.hard_memory_limit_in_bytes
) -
871 static_cast<int64
>(resource_pool_
->acquired_memory_usage_bytes());
872 int resources_available
= resources_releasable_
+
873 global_state_
.num_resources_limit
-
874 resource_pool_
->acquired_resource_count();
875 size_t soft_bytes_allocatable
=
876 std::max(static_cast<int64
>(0), soft_bytes_available
);
877 size_t hard_bytes_allocatable
=
878 std::max(static_cast<int64
>(0), hard_bytes_available
);
879 size_t resources_allocatable
= std::max(0, resources_available
);
881 size_t bytes_that_exceeded_memory_budget
= 0;
882 size_t soft_bytes_left
= soft_bytes_allocatable
;
883 size_t hard_bytes_left
= hard_bytes_allocatable
;
885 size_t resources_left
= resources_allocatable
;
886 bool oomed_soft
= false;
887 bool oomed_hard
= false;
888 bool have_hit_soft_memory
= false; // Soft memory comes after hard.
890 // Memory we assign to raster tasks now will be deducted from our memory
891 // in future iterations if priorities change. By assigning at most half
892 // the raster limit, we will always have another 50% left even if priorities
893 // change completely (assuming we check for completed/cancelled rasters
894 // between each call to this function).
895 size_t max_raster_bytes
= max_raster_usage_bytes_
/ 2;
896 size_t raster_bytes
= 0;
898 unsigned schedule_priority
= 1u;
899 for (PrioritizedTileSet::Iterator
it(tiles
, true); it
; ++it
) {
901 ManagedTileState
& mts
= tile
->managed_state();
903 mts
.scheduled_priority
= schedule_priority
++;
905 mts
.raster_mode
= tile
->DetermineOverallRasterMode();
907 ManagedTileState::TileVersion
& tile_version
=
908 mts
.tile_versions
[mts
.raster_mode
];
910 // If this tile doesn't need a resource, then nothing to do.
911 if (!tile_version
.requires_resource())
914 // If the tile is not needed, free it up.
915 if (mts
.bin
== NEVER_BIN
) {
916 FreeResourcesForTile(tile
);
920 const bool tile_uses_hard_limit
= mts
.bin
<= NOW_BIN
;
921 const size_t bytes_if_allocated
= BytesConsumedIfAllocated(tile
);
922 const size_t raster_bytes_if_rastered
= raster_bytes
+ bytes_if_allocated
;
923 const size_t tile_bytes_left
=
924 (tile_uses_hard_limit
) ? hard_bytes_left
: soft_bytes_left
;
926 // Hard-limit is reserved for tiles that would cause a calamity
927 // if they were to go away, so by definition they are the highest
928 // priority memory, and must be at the front of the list.
929 DCHECK(!(have_hit_soft_memory
&& tile_uses_hard_limit
));
930 have_hit_soft_memory
|= !tile_uses_hard_limit
;
932 size_t tile_bytes
= 0;
933 size_t tile_resources
= 0;
935 // It costs to maintain a resource.
936 for (int mode
= 0; mode
< NUM_RASTER_MODES
; ++mode
) {
937 if (mts
.tile_versions
[mode
].resource_
) {
938 tile_bytes
+= bytes_if_allocated
;
943 // Allow lower priority tiles with initialized resources to keep
944 // their memory by only assigning memory to new raster tasks if
945 // they can be scheduled.
946 if (raster_bytes_if_rastered
<= max_raster_bytes
) {
947 // If we don't have the required version, and it's not in flight
948 // then we'll have to pay to create a new task.
949 if (!tile_version
.resource_
&& !tile_version
.raster_task_
) {
950 tile_bytes
+= bytes_if_allocated
;
956 if (tile_bytes
> tile_bytes_left
|| tile_resources
> resources_left
) {
957 FreeResourcesForTile(tile
);
959 // This tile was already on screen and now its resources have been
960 // released. In order to prevent checkerboarding, set this tile as
961 // rasterize on demand immediately.
962 if (mts
.visible_and_ready_to_draw
&& use_rasterize_on_demand_
)
963 tile_version
.set_rasterize_on_demand();
966 if (tile_uses_hard_limit
) {
968 bytes_that_exceeded_memory_budget
+= tile_bytes
;
971 resources_left
-= tile_resources
;
972 hard_bytes_left
-= tile_bytes
;
974 (soft_bytes_left
> tile_bytes
) ? soft_bytes_left
- tile_bytes
: 0;
975 if (tile_version
.resource_
)
979 DCHECK(!tile_version
.resource_
);
981 // Tile shouldn't be rasterized if |tiles_that_need_to_be_rasterized|
982 // has reached it's limit or we've failed to assign gpu memory to this
983 // or any higher priority tile. Preventing tiles that fit into memory
984 // budget to be rasterized when higher priority tile is oom is
985 // important for two reasons:
986 // 1. Tile size should not impact raster priority.
987 // 2. Tiles with existing raster task could otherwise incorrectly
988 // be added as they are not affected by |bytes_allocatable|.
989 bool can_schedule_tile
=
990 !oomed_soft
&& raster_bytes_if_rastered
<= max_raster_bytes
&&
991 tiles_that_need_to_be_rasterized
->size() < kScheduledRasterTasksLimit
;
993 if (!can_schedule_tile
) {
994 all_tiles_that_need_to_be_rasterized_have_memory_
= false;
995 if (tile
->required_for_activation())
996 all_tiles_required_for_activation_have_memory_
= false;
997 it
.DisablePriorityOrdering();
1001 raster_bytes
= raster_bytes_if_rastered
;
1002 tiles_that_need_to_be_rasterized
->push_back(tile
);
1005 // OOM reporting uses hard-limit, soft-OOM is normal depending on limit.
1006 ever_exceeded_memory_budget_
|= oomed_hard
;
1007 if (ever_exceeded_memory_budget_
) {
1008 TRACE_COUNTER_ID2("cc",
1009 "over_memory_budget",
1012 global_state_
.hard_memory_limit_in_bytes
,
1014 bytes_that_exceeded_memory_budget
);
1016 memory_stats_from_last_assign_
.total_budget_in_bytes
=
1017 global_state_
.hard_memory_limit_in_bytes
;
1018 memory_stats_from_last_assign_
.bytes_allocated
=
1019 hard_bytes_allocatable
- hard_bytes_left
;
1020 memory_stats_from_last_assign_
.bytes_unreleasable
=
1021 hard_bytes_allocatable
- bytes_releasable_
;
1022 memory_stats_from_last_assign_
.bytes_over
= bytes_that_exceeded_memory_budget
;
1025 void TileManager::FreeResourceForTile(Tile
* tile
, RasterMode mode
) {
1026 ManagedTileState
& mts
= tile
->managed_state();
1027 if (mts
.tile_versions
[mode
].resource_
) {
1028 resource_pool_
->ReleaseResource(mts
.tile_versions
[mode
].resource_
.Pass());
1030 DCHECK_GE(bytes_releasable_
, BytesConsumedIfAllocated(tile
));
1031 DCHECK_GE(resources_releasable_
, 1u);
1033 bytes_releasable_
-= BytesConsumedIfAllocated(tile
);
1034 --resources_releasable_
;
1038 void TileManager::FreeResourcesForTile(Tile
* tile
) {
1039 for (int mode
= 0; mode
< NUM_RASTER_MODES
; ++mode
) {
1040 FreeResourceForTile(tile
, static_cast<RasterMode
>(mode
));
1044 void TileManager::FreeUnusedResourcesForTile(Tile
* tile
) {
1045 DCHECK(tile
->IsReadyToDraw());
1046 ManagedTileState
& mts
= tile
->managed_state();
1047 RasterMode used_mode
= HIGH_QUALITY_NO_LCD_RASTER_MODE
;
1048 for (int mode
= 0; mode
< NUM_RASTER_MODES
; ++mode
) {
1049 if (mts
.tile_versions
[mode
].IsReadyToDraw()) {
1050 used_mode
= static_cast<RasterMode
>(mode
);
1055 for (int mode
= 0; mode
< NUM_RASTER_MODES
; ++mode
) {
1056 if (mode
!= used_mode
)
1057 FreeResourceForTile(tile
, static_cast<RasterMode
>(mode
));
1061 void TileManager::ScheduleTasks(
1062 const TileVector
& tiles_that_need_to_be_rasterized
) {
1064 "TileManager::ScheduleTasks",
1066 tiles_that_need_to_be_rasterized
.size());
1068 DCHECK(did_check_for_completed_tasks_since_last_schedule_tasks_
);
1070 for (size_t i
= 0; i
< NUM_RASTER_WORKER_POOL_TYPES
; ++i
)
1071 raster_queue_
[i
].Reset();
1073 // Build a new task queue containing all task currently needed. Tasks
1074 // are added in order of priority, highest priority task first.
1075 for (TileVector::const_iterator it
= tiles_that_need_to_be_rasterized
.begin();
1076 it
!= tiles_that_need_to_be_rasterized
.end();
1079 ManagedTileState
& mts
= tile
->managed_state();
1080 ManagedTileState::TileVersion
& tile_version
=
1081 mts
.tile_versions
[mts
.raster_mode
];
1083 DCHECK(tile_version
.requires_resource());
1084 DCHECK(!tile_version
.resource_
);
1086 if (!tile_version
.raster_task_
)
1087 tile_version
.raster_task_
= CreateRasterTask(tile
);
1089 size_t pool_type
= tile
->use_gpu_rasterization()
1090 ? RASTER_WORKER_POOL_TYPE_DIRECT
1091 : RASTER_WORKER_POOL_TYPE_DEFAULT
;
1093 raster_queue_
[pool_type
].items
.push_back(RasterTaskQueue::Item(
1094 tile_version
.raster_task_
.get(), tile
->required_for_activation()));
1095 raster_queue_
[pool_type
].required_for_activation_count
+=
1096 tile
->required_for_activation();
1099 // We must reduce the amount of unused resoruces before calling
1100 // ScheduleTasks to prevent usage from rising above limits.
1101 resource_pool_
->ReduceResourceUsage();
1103 // Schedule running of |raster_tasks_|. This replaces any previously
1104 // scheduled tasks and effectively cancels all tasks not present
1105 // in |raster_tasks_|.
1106 raster_worker_pool_delegate_
->ScheduleTasks(raster_queue_
);
1108 // It's now safe to clean up orphan tasks as raster worker pool is not
1109 // allowed to keep around unreferenced raster tasks after ScheduleTasks() has
1111 orphan_raster_tasks_
.clear();
1113 did_check_for_completed_tasks_since_last_schedule_tasks_
= false;
1116 scoped_refptr
<internal::WorkerPoolTask
> TileManager::CreateImageDecodeTask(
1118 SkPixelRef
* pixel_ref
) {
1119 return make_scoped_refptr(new ImageDecodeWorkerPoolTaskImpl(
1122 rendering_stats_instrumentation_
,
1123 base::Bind(&TileManager::OnImageDecodeTaskCompleted
,
1124 base::Unretained(this),
1126 base::Unretained(pixel_ref
))));
1129 scoped_refptr
<internal::RasterWorkerPoolTask
> TileManager::CreateRasterTask(
1131 ManagedTileState
& mts
= tile
->managed_state();
1133 scoped_ptr
<ScopedResource
> resource
=
1134 resource_pool_
->AcquireResource(tile
->tile_size_
.size());
1135 const ScopedResource
* const_resource
= resource
.get();
1137 // Create and queue all image decode tasks that this tile depends on.
1138 internal::WorkerPoolTask::Vector decode_tasks
;
1139 PixelRefTaskMap
& existing_pixel_refs
= image_decode_tasks_
[tile
->layer_id()];
1140 for (PicturePileImpl::PixelRefIterator
iter(
1141 tile
->content_rect(), tile
->contents_scale(), tile
->picture_pile());
1144 SkPixelRef
* pixel_ref
= *iter
;
1145 uint32_t id
= pixel_ref
->getGenerationID();
1147 // Append existing image decode task if available.
1148 PixelRefTaskMap::iterator decode_task_it
= existing_pixel_refs
.find(id
);
1149 if (decode_task_it
!= existing_pixel_refs
.end()) {
1150 decode_tasks
.push_back(decode_task_it
->second
);
1154 // Create and append new image decode task for this pixel ref.
1155 scoped_refptr
<internal::WorkerPoolTask
> decode_task
=
1156 CreateImageDecodeTask(tile
, pixel_ref
);
1157 decode_tasks
.push_back(decode_task
);
1158 existing_pixel_refs
[id
] = decode_task
;
1161 // We analyze picture before rasterization to detect solid-color tiles.
1162 // If the tile is detected as such there is no need to raster or upload.
1163 // It is drawn directly as a solid-color quad saving raster and upload cost.
1164 // The analysis step is however expensive and is not justified when doing
1165 // gpu rasterization where there is no upload.
1166 bool analyze_picture
= !tile
->use_gpu_rasterization();
1168 return make_scoped_refptr(new RasterWorkerPoolTaskImpl(
1170 tile
->picture_pile(),
1171 tile
->content_rect(),
1172 tile
->contents_scale(),
1176 static_cast<const void*>(tile
),
1177 tile
->source_frame_number(),
1179 rendering_stats_instrumentation_
,
1180 base::Bind(&TileManager::OnRasterTaskCompleted
,
1181 base::Unretained(this),
1183 base::Passed(&resource
),
1188 void TileManager::OnImageDecodeTaskCompleted(int layer_id
,
1189 SkPixelRef
* pixel_ref
,
1190 bool was_canceled
) {
1191 // If the task was canceled, we need to clean it up
1192 // from |image_decode_tasks_|.
1196 LayerPixelRefTaskMap::iterator layer_it
= image_decode_tasks_
.find(layer_id
);
1198 if (layer_it
== image_decode_tasks_
.end())
1201 PixelRefTaskMap
& pixel_ref_tasks
= layer_it
->second
;
1202 PixelRefTaskMap::iterator task_it
=
1203 pixel_ref_tasks
.find(pixel_ref
->getGenerationID());
1205 if (task_it
!= pixel_ref_tasks
.end())
1206 pixel_ref_tasks
.erase(task_it
);
1209 void TileManager::OnRasterTaskCompleted(
1211 scoped_ptr
<ScopedResource
> resource
,
1212 RasterMode raster_mode
,
1213 const PicturePileImpl::Analysis
& analysis
,
1214 bool was_canceled
) {
1215 TileMap::iterator it
= tiles_
.find(tile_id
);
1216 if (it
== tiles_
.end()) {
1217 ++update_visible_tiles_stats_
.canceled_count
;
1218 resource_pool_
->ReleaseResource(resource
.Pass());
1222 Tile
* tile
= it
->second
;
1223 ManagedTileState
& mts
= tile
->managed_state();
1224 ManagedTileState::TileVersion
& tile_version
= mts
.tile_versions
[raster_mode
];
1225 DCHECK(tile_version
.raster_task_
);
1226 orphan_raster_tasks_
.push_back(tile_version
.raster_task_
);
1227 tile_version
.raster_task_
= NULL
;
1230 ++update_visible_tiles_stats_
.canceled_count
;
1231 resource_pool_
->ReleaseResource(resource
.Pass());
1235 ++update_visible_tiles_stats_
.completed_count
;
1237 tile_version
.set_has_text(analysis
.has_text
);
1238 if (analysis
.is_solid_color
) {
1239 tile_version
.set_solid_color(analysis
.solid_color
);
1240 resource_pool_
->ReleaseResource(resource
.Pass());
1242 tile_version
.set_use_resource();
1243 tile_version
.resource_
= resource
.Pass();
1245 bytes_releasable_
+= BytesConsumedIfAllocated(tile
);
1246 ++resources_releasable_
;
1249 FreeUnusedResourcesForTile(tile
);
1250 if (tile
->priority(ACTIVE_TREE
).distance_to_visible
== 0.f
)
1251 did_initialize_visible_tile_
= true;
1254 scoped_refptr
<Tile
> TileManager::CreateTile(PicturePileImpl
* picture_pile
,
1255 const gfx::Size
& tile_size
,
1256 const gfx::Rect
& content_rect
,
1257 const gfx::Rect
& opaque_rect
,
1258 float contents_scale
,
1260 int source_frame_number
,
1262 scoped_refptr
<Tile
> tile
= make_scoped_refptr(new Tile(this,
1269 source_frame_number
,
1271 DCHECK(tiles_
.find(tile
->id()) == tiles_
.end());
1273 tiles_
[tile
->id()] = tile
;
1274 used_layer_counts_
[tile
->layer_id()]++;
1275 prioritized_tiles_dirty_
= true;
1279 void TileManager::RegisterPictureLayerImpl(PictureLayerImpl
* layer
) {
1280 DCHECK(std::find(layers_
.begin(), layers_
.end(), layer
) == layers_
.end());
1281 layers_
.push_back(layer
);
1284 void TileManager::UnregisterPictureLayerImpl(PictureLayerImpl
* layer
) {
1285 std::vector
<PictureLayerImpl
*>::iterator it
=
1286 std::find(layers_
.begin(), layers_
.end(), layer
);
1287 DCHECK(it
!= layers_
.end());
1291 void TileManager::GetPairedPictureLayers(
1292 std::vector
<PairedPictureLayer
>* paired_layers
) const {
1293 paired_layers
->clear();
1294 // Reserve a maximum possible paired layers.
1295 paired_layers
->reserve(layers_
.size());
1297 for (std::vector
<PictureLayerImpl
*>::const_iterator it
= layers_
.begin();
1298 it
!= layers_
.end();
1300 PictureLayerImpl
* layer
= *it
;
1302 // This is a recycle tree layer, so it shouldn't be included in the raster
1304 // TODO(vmpstr): We need these layers for eviction, so they should probably
1305 // go into a separate vector as an output.
1306 if (!layer
->IsOnActiveOrPendingTree())
1309 PictureLayerImpl
* twin_layer
= layer
->GetTwinLayer();
1311 // If the twin layer is recycled, it is not a valid twin.
1312 if (twin_layer
&& !twin_layer
->IsOnActiveOrPendingTree())
1315 PairedPictureLayer paired_layer
;
1316 WhichTree tree
= layer
->GetTree();
1318 // If the current tree is ACTIVE_TREE, then always generate a paired_layer.
1319 // If current tree is PENDING_TREE, then only generate a paired_layer if
1320 // there is no twin layer.
1321 if (tree
== ACTIVE_TREE
) {
1322 DCHECK(!twin_layer
|| twin_layer
->GetTree() == PENDING_TREE
);
1323 paired_layer
.active_layer
= layer
;
1324 paired_layer
.pending_layer
= twin_layer
;
1325 paired_layers
->push_back(paired_layer
);
1326 } else if (!twin_layer
) {
1327 paired_layer
.active_layer
= NULL
;
1328 paired_layer
.pending_layer
= layer
;
1329 paired_layers
->push_back(paired_layer
);
1334 TileManager::PairedPictureLayer::PairedPictureLayer()
1335 : active_layer(NULL
), pending_layer(NULL
) {}
1337 TileManager::PairedPictureLayer::~PairedPictureLayer() {}
1339 TileManager::RasterTileIterator::RasterTileIterator(TileManager
* tile_manager
,
1340 TreePriority tree_priority
)
1341 : tree_priority_(tree_priority
), comparator_(tree_priority
) {
1342 std::vector
<TileManager::PairedPictureLayer
> paired_layers
;
1343 tile_manager
->GetPairedPictureLayers(&paired_layers
);
1344 bool prioritize_low_res
= tree_priority_
== SMOOTHNESS_TAKES_PRIORITY
;
1346 paired_iterators_
.reserve(paired_layers
.size());
1347 iterator_heap_
.reserve(paired_layers
.size());
1348 for (std::vector
<TileManager::PairedPictureLayer
>::iterator it
=
1349 paired_layers
.begin();
1350 it
!= paired_layers
.end();
1352 PairedPictureLayerIterator paired_iterator
;
1353 if (it
->active_layer
) {
1354 paired_iterator
.active_iterator
=
1355 PictureLayerImpl::LayerRasterTileIterator(it
->active_layer
,
1356 prioritize_low_res
);
1359 if (it
->pending_layer
) {
1360 paired_iterator
.pending_iterator
=
1361 PictureLayerImpl::LayerRasterTileIterator(it
->pending_layer
,
1362 prioritize_low_res
);
1365 if (paired_iterator
.PeekTile(tree_priority_
) != NULL
) {
1366 paired_iterators_
.push_back(paired_iterator
);
1367 iterator_heap_
.push_back(&paired_iterators_
.back());
1371 std::make_heap(iterator_heap_
.begin(), iterator_heap_
.end(), comparator_
);
1374 TileManager::RasterTileIterator::~RasterTileIterator() {}
1376 TileManager::RasterTileIterator
& TileManager::RasterTileIterator::operator++() {
1379 std::pop_heap(iterator_heap_
.begin(), iterator_heap_
.end(), comparator_
);
1380 PairedPictureLayerIterator
* paired_iterator
= iterator_heap_
.back();
1381 iterator_heap_
.pop_back();
1383 paired_iterator
->PopTile(tree_priority_
);
1384 if (paired_iterator
->PeekTile(tree_priority_
) != NULL
) {
1385 iterator_heap_
.push_back(paired_iterator
);
1386 std::push_heap(iterator_heap_
.begin(), iterator_heap_
.end(), comparator_
);
1391 TileManager::RasterTileIterator::operator bool() const {
1392 return !iterator_heap_
.empty();
1395 Tile
* TileManager::RasterTileIterator::operator*() {
1397 return iterator_heap_
.front()->PeekTile(tree_priority_
);
1400 TileManager::RasterTileIterator::PairedPictureLayerIterator::
1401 PairedPictureLayerIterator() {}
1403 TileManager::RasterTileIterator::PairedPictureLayerIterator::
1404 ~PairedPictureLayerIterator() {}
1406 Tile
* TileManager::RasterTileIterator::PairedPictureLayerIterator::PeekTile(
1407 TreePriority tree_priority
) {
1408 PictureLayerImpl::LayerRasterTileIterator
* next_iterator
=
1409 NextTileIterator(tree_priority
).first
;
1413 DCHECK(*next_iterator
);
1414 DCHECK(std::find(returned_shared_tiles
.begin(),
1415 returned_shared_tiles
.end(),
1416 **next_iterator
) == returned_shared_tiles
.end());
1417 return **next_iterator
;
1420 void TileManager::RasterTileIterator::PairedPictureLayerIterator::PopTile(
1421 TreePriority tree_priority
) {
1422 PictureLayerImpl::LayerRasterTileIterator
* next_iterator
=
1423 NextTileIterator(tree_priority
).first
;
1424 DCHECK(next_iterator
);
1425 DCHECK(*next_iterator
);
1426 returned_shared_tiles
.push_back(**next_iterator
);
1429 next_iterator
= NextTileIterator(tree_priority
).first
;
1430 while (next_iterator
&&
1431 std::find(returned_shared_tiles
.begin(),
1432 returned_shared_tiles
.end(),
1433 **next_iterator
) != returned_shared_tiles
.end()) {
1435 next_iterator
= NextTileIterator(tree_priority
).first
;
1439 std::pair
<PictureLayerImpl::LayerRasterTileIterator
*, WhichTree
>
1440 TileManager::RasterTileIterator::PairedPictureLayerIterator::NextTileIterator(
1441 TreePriority tree_priority
) {
1442 // If both iterators are out of tiles, return NULL.
1443 if (!active_iterator
&& !pending_iterator
) {
1444 return std::pair
<PictureLayerImpl::LayerRasterTileIterator
*, WhichTree
>(
1448 // If we only have one iterator with tiles, return it.
1449 if (!active_iterator
)
1450 return std::make_pair(&pending_iterator
, PENDING_TREE
);
1451 if (!pending_iterator
)
1452 return std::make_pair(&active_iterator
, ACTIVE_TREE
);
1454 // Now both iterators have tiles, so we have to decide based on tree priority.
1455 switch (tree_priority
) {
1456 case SMOOTHNESS_TAKES_PRIORITY
:
1457 return std::make_pair(&active_iterator
, ACTIVE_TREE
);
1458 case NEW_CONTENT_TAKES_PRIORITY
:
1459 return std::make_pair(&pending_iterator
, ACTIVE_TREE
);
1460 case SAME_PRIORITY_FOR_BOTH_TREES
: {
1461 Tile
* active_tile
= *active_iterator
;
1462 Tile
* pending_tile
= *pending_iterator
;
1463 if (active_tile
== pending_tile
)
1464 return std::make_pair(&active_iterator
, ACTIVE_TREE
);
1466 const TilePriority
& active_priority
= active_tile
->priority(ACTIVE_TREE
);
1467 const TilePriority
& pending_priority
=
1468 pending_tile
->priority(PENDING_TREE
);
1470 if (active_priority
.IsHigherPriorityThan(pending_priority
))
1471 return std::make_pair(&active_iterator
, ACTIVE_TREE
);
1472 return std::make_pair(&pending_iterator
, PENDING_TREE
);
1477 // Keep the compiler happy.
1478 return std::pair
<PictureLayerImpl::LayerRasterTileIterator
*, WhichTree
>(
1482 TileManager::RasterTileIterator::RasterOrderComparator::RasterOrderComparator(
1483 TreePriority tree_priority
)
1484 : tree_priority_(tree_priority
) {}
1486 bool TileManager::RasterTileIterator::RasterOrderComparator::ComparePriorities(
1487 const TilePriority
& a_priority
,
1488 const TilePriority
& b_priority
,
1489 bool prioritize_low_res
) const {
1490 if (b_priority
.resolution
!= a_priority
.resolution
) {
1491 return (prioritize_low_res
&& b_priority
.resolution
== LOW_RESOLUTION
) ||
1492 (!prioritize_low_res
&& b_priority
.resolution
== HIGH_RESOLUTION
) ||
1493 (a_priority
.resolution
== NON_IDEAL_RESOLUTION
);
1496 return b_priority
.IsHigherPriorityThan(a_priority
);
1499 bool TileManager::RasterTileIterator::RasterOrderComparator::operator()(
1500 PairedPictureLayerIterator
* a
,
1501 PairedPictureLayerIterator
* b
) const {
1502 std::pair
<PictureLayerImpl::LayerRasterTileIterator
*, WhichTree
> a_pair
=
1503 a
->NextTileIterator(tree_priority_
);
1504 DCHECK(a_pair
.first
);
1505 DCHECK(*a_pair
.first
);
1507 std::pair
<PictureLayerImpl::LayerRasterTileIterator
*, WhichTree
> b_pair
=
1508 b
->NextTileIterator(tree_priority_
);
1509 DCHECK(b_pair
.first
);
1510 DCHECK(*b_pair
.first
);
1512 Tile
* a_tile
= **a_pair
.first
;
1513 Tile
* b_tile
= **b_pair
.first
;
1515 switch (tree_priority_
) {
1516 case SMOOTHNESS_TAKES_PRIORITY
:
1517 return ComparePriorities(a_tile
->priority(ACTIVE_TREE
),
1518 b_tile
->priority(ACTIVE_TREE
),
1519 true /* prioritize low res */);
1520 case NEW_CONTENT_TAKES_PRIORITY
:
1521 return ComparePriorities(a_tile
->priority(PENDING_TREE
),
1522 b_tile
->priority(PENDING_TREE
),
1523 false /* prioritize low res */);
1524 case SAME_PRIORITY_FOR_BOTH_TREES
:
1525 return ComparePriorities(a_tile
->priority(a_pair
.second
),
1526 b_tile
->priority(b_pair
.second
),
1527 false /* prioritize low res */);
1531 // Keep the compiler happy.