cc/resources/tile_manager.cc
1 // Copyright 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "cc/resources/tile_manager.h"
7 #include <algorithm>
8 #include <limits>
9 #include <string>
11 #include "base/bind.h"
12 #include "base/debug/trace_event_argument.h"
13 #include "base/json/json_writer.h"
14 #include "base/logging.h"
15 #include "base/metrics/histogram.h"
16 #include "cc/debug/devtools_instrumentation.h"
17 #include "cc/debug/frame_viewer_instrumentation.h"
18 #include "cc/debug/traced_value.h"
19 #include "cc/layers/picture_layer_impl.h"
20 #include "cc/resources/rasterizer.h"
21 #include "cc/resources/tile.h"
22 #include "skia/ext/paint_simplifier.h"
23 #include "third_party/skia/include/core/SkBitmap.h"
24 #include "third_party/skia/include/core/SkPixelRef.h"
25 #include "ui/gfx/rect_conversions.h"
27 namespace cc {
28 namespace {
30 // Flag to indicate whether we should try to detect that
31 // a tile is of solid color.
32 const bool kUseColorEstimator = true;
34 class RasterTaskImpl : public RasterTask {
35 public:
36 RasterTaskImpl(
37 const Resource* resource,
38 PicturePileImpl* picture_pile,
39 const gfx::Rect& content_rect,
40 float contents_scale,
41 RasterMode raster_mode,
42 TileResolution tile_resolution,
43 int layer_id,
44 const void* tile_id,
45 int source_frame_number,
46 bool analyze_picture,
47 RenderingStatsInstrumentation* rendering_stats,
48 const base::Callback<void(const PicturePileImpl::Analysis&, bool)>& reply,
49 ImageDecodeTask::Vector* dependencies)
50 : RasterTask(resource, dependencies),
51 picture_pile_(picture_pile),
52 content_rect_(content_rect),
53 contents_scale_(contents_scale),
54 raster_mode_(raster_mode),
55 tile_resolution_(tile_resolution),
56 layer_id_(layer_id),
57 tile_id_(tile_id),
58 source_frame_number_(source_frame_number),
59 analyze_picture_(analyze_picture),
60 rendering_stats_(rendering_stats),
61 reply_(reply),
62 canvas_(NULL) {}
64 // Overridden from Task:
65 virtual void RunOnWorkerThread() OVERRIDE {
66 TRACE_EVENT0("cc", "RasterizerTaskImpl::RunOnWorkerThread");
68 DCHECK(picture_pile_);
69 if (!canvas_)
70 return;
72 if (analyze_picture_) {
73 Analyze(picture_pile_.get());
74 if (analysis_.is_solid_color)
75 return;
78 Raster(picture_pile_.get());
81 // Overridden from RasterizerTask:
82 virtual void ScheduleOnOriginThread(RasterizerTaskClient* client) OVERRIDE {
83 DCHECK(!canvas_);
84 canvas_ = client->AcquireCanvasForRaster(this);
86 virtual void CompleteOnOriginThread(RasterizerTaskClient* client) OVERRIDE {
87 canvas_ = NULL;
88 client->ReleaseCanvasForRaster(this);
90 virtual void RunReplyOnOriginThread() OVERRIDE {
91 DCHECK(!canvas_);
92 reply_.Run(analysis_, !HasFinishedRunning());
95 protected:
96 virtual ~RasterTaskImpl() { DCHECK(!canvas_); }
98 private:
99 void Analyze(const PicturePileImpl* picture_pile) {
100 frame_viewer_instrumentation::ScopedAnalyzeTask analyze_task(
101 tile_id_, tile_resolution_, source_frame_number_, layer_id_);
103 DCHECK(picture_pile);
105 picture_pile->AnalyzeInRect(
106 content_rect_, contents_scale_, &analysis_, rendering_stats_);
108 // Record the solid color prediction.
109 UMA_HISTOGRAM_BOOLEAN("Renderer4.SolidColorTilesAnalyzed",
110 analysis_.is_solid_color);
112 // Clear the flag if we're not using the estimator.
113 analysis_.is_solid_color &= kUseColorEstimator;
116 void Raster(const PicturePileImpl* picture_pile) {
117 frame_viewer_instrumentation::ScopedRasterTask raster_task(
118 tile_id_,
119 tile_resolution_,
120 source_frame_number_,
121 layer_id_,
122 raster_mode_);
123 devtools_instrumentation::ScopedLayerTask layer_task(
124 devtools_instrumentation::kRasterTask, layer_id_);
126 skia::RefPtr<SkDrawFilter> draw_filter;
127 switch (raster_mode_) {
128 case LOW_QUALITY_RASTER_MODE:
129 draw_filter = skia::AdoptRef(new skia::PaintSimplifier);
130 break;
131 case HIGH_QUALITY_RASTER_MODE:
132 break;
133 case NUM_RASTER_MODES:
134 default:
135 NOTREACHED();
137 canvas_->setDrawFilter(draw_filter.get());
139 base::TimeDelta prev_rasterize_time =
140 rendering_stats_->impl_thread_rendering_stats().rasterize_time;
142 // Only record rasterization time for high-res tiles, because
143 // low-res tiles are not required for activation and therefore
144 // introduce noise in the measurement (sometimes they get rasterized
145 // before we draw and sometimes they don't).
146 RenderingStatsInstrumentation* stats =
147 tile_resolution_ == HIGH_RESOLUTION ? rendering_stats_ : NULL;
148 DCHECK(picture_pile);
149 picture_pile->RasterToBitmap(
150 canvas_, content_rect_, contents_scale_, stats);
152 if (rendering_stats_->record_rendering_stats()) {
153 base::TimeDelta current_rasterize_time =
154 rendering_stats_->impl_thread_rendering_stats().rasterize_time;
155 HISTOGRAM_CUSTOM_COUNTS(
156 "Renderer4.PictureRasterTimeUS",
157 (current_rasterize_time - prev_rasterize_time).InMicroseconds(),
158 0,
159 100000,
160 100);
164 PicturePileImpl::Analysis analysis_;
165 scoped_refptr<PicturePileImpl> picture_pile_;
166 gfx::Rect content_rect_;
167 float contents_scale_;
168 RasterMode raster_mode_;
169 TileResolution tile_resolution_;
170 int layer_id_;
171 const void* tile_id_;
172 int source_frame_number_;
173 bool analyze_picture_;
174 RenderingStatsInstrumentation* rendering_stats_;
175 const base::Callback<void(const PicturePileImpl::Analysis&, bool)> reply_;
176 SkCanvas* canvas_;
178 DISALLOW_COPY_AND_ASSIGN(RasterTaskImpl);
181 class ImageDecodeTaskImpl : public ImageDecodeTask {
182 public:
183 ImageDecodeTaskImpl(SkPixelRef* pixel_ref,
184 int layer_id,
185 RenderingStatsInstrumentation* rendering_stats,
186 const base::Callback<void(bool was_canceled)>& reply)
187 : pixel_ref_(skia::SharePtr(pixel_ref)),
188 layer_id_(layer_id),
189 rendering_stats_(rendering_stats),
190 reply_(reply) {}
192 // Overridden from Task:
193 virtual void RunOnWorkerThread() OVERRIDE {
194 TRACE_EVENT0("cc", "ImageDecodeTaskImpl::RunOnWorkerThread");
196 devtools_instrumentation::ScopedImageDecodeTask image_decode_task(
197 pixel_ref_.get());
198 // This will cause the image referred to by pixel ref to be decoded.
199 pixel_ref_->lockPixels();
200 pixel_ref_->unlockPixels();
203 // Overridden from RasterizerTask:
204 virtual void ScheduleOnOriginThread(RasterizerTaskClient* client) OVERRIDE {}
205 virtual void CompleteOnOriginThread(RasterizerTaskClient* client) OVERRIDE {}
206 virtual void RunReplyOnOriginThread() OVERRIDE {
207 reply_.Run(!HasFinishedRunning());
210 protected:
211 virtual ~ImageDecodeTaskImpl() {}
213 private:
214 skia::RefPtr<SkPixelRef> pixel_ref_;
215 int layer_id_;
216 RenderingStatsInstrumentation* rendering_stats_;
217 const base::Callback<void(bool was_canceled)> reply_;
219 DISALLOW_COPY_AND_ASSIGN(ImageDecodeTaskImpl);
222 const size_t kScheduledRasterTasksLimit = 32u;
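// kScheduledRasterTasksLimit caps how many tiles AssignGpuMemoryToTiles() adds
// to the needs-to-be-rasterized list in a single pass; once it is reached,
// remaining tiles keep any resources they already hold but get no new tasks.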
224 // Memory limit policy works by mapping some bin states to the NEVER bin.
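// The outer index is the TileMemoryLimitPolicy and the inner index is the
// tile's computed ManagedTileBin; the entry is the bin actually used under
// that policy.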
225 const ManagedTileBin kBinPolicyMap[NUM_TILE_MEMORY_LIMIT_POLICIES][NUM_BINS] = {
226 // [ALLOW_NOTHING]
227 {NEVER_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
228 NEVER_BIN, // [NOW_BIN]
229 NEVER_BIN, // [SOON_BIN]
230 NEVER_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
231 NEVER_BIN, // [EVENTUALLY_BIN]
232 NEVER_BIN, // [AT_LAST_AND_ACTIVE_BIN]
233 NEVER_BIN, // [AT_LAST_BIN]
234 NEVER_BIN // [NEVER_BIN]
236 // [ALLOW_ABSOLUTE_MINIMUM]
237 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
238 NOW_BIN, // [NOW_BIN]
239 NEVER_BIN, // [SOON_BIN]
240 NEVER_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
241 NEVER_BIN, // [EVENTUALLY_BIN]
242 NEVER_BIN, // [AT_LAST_AND_ACTIVE_BIN]
243 NEVER_BIN, // [AT_LAST_BIN]
244 NEVER_BIN // [NEVER_BIN]
246 // [ALLOW_PREPAINT_ONLY]
247 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
248 NOW_BIN, // [NOW_BIN]
249 SOON_BIN, // [SOON_BIN]
250 NEVER_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
251 NEVER_BIN, // [EVENTUALLY_BIN]
252 NEVER_BIN, // [AT_LAST_AND_ACTIVE_BIN]
253 NEVER_BIN, // [AT_LAST_BIN]
254 NEVER_BIN // [NEVER_BIN]
256 // [ALLOW_ANYTHING]
257 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
258 NOW_BIN, // [NOW_BIN]
259 SOON_BIN, // [SOON_BIN]
260 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
261 EVENTUALLY_BIN, // [EVENTUALLY_BIN]
262 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_AND_ACTIVE_BIN]
263 AT_LAST_BIN, // [AT_LAST_BIN]
264 NEVER_BIN // [NEVER_BIN]
267 // Ready to draw works by mapping NOW_BIN to NOW_AND_READY_TO_DRAW_BIN.
268 const ManagedTileBin kBinReadyToDrawMap[2][NUM_BINS] = {
269 // Not ready
270 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
271 NOW_BIN, // [NOW_BIN]
272 SOON_BIN, // [SOON_BIN]
273 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
274 EVENTUALLY_BIN, // [EVENTUALLY_BIN]
275 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_AND_ACTIVE_BIN]
276 AT_LAST_BIN, // [AT_LAST_BIN]
277 NEVER_BIN // [NEVER_BIN]
279 // Ready
280 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
281 NOW_AND_READY_TO_DRAW_BIN, // [NOW_BIN]
282 SOON_BIN, // [SOON_BIN]
283 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
284 EVENTUALLY_BIN, // [EVENTUALLY_BIN]
285 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_AND_ACTIVE_BIN]
286 AT_LAST_BIN, // [AT_LAST_BIN]
287 NEVER_BIN // [NEVER_BIN]
290 // Active works by mapping some bin states to the equivalent _ACTIVE_BIN state.
291 const ManagedTileBin kBinIsActiveMap[2][NUM_BINS] = {
292 // Inactive
293 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
294 NOW_BIN, // [NOW_BIN]
295 SOON_BIN, // [SOON_BIN]
296 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
297 EVENTUALLY_BIN, // [EVENTUALLY_BIN]
298 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_AND_ACTIVE_BIN]
299 AT_LAST_BIN, // [AT_LAST_BIN]
300 NEVER_BIN // [NEVER_BIN]
302 // Active
303 {NOW_AND_READY_TO_DRAW_BIN, // [NOW_AND_READY_TO_DRAW_BIN]
304 NOW_BIN, // [NOW_BIN]
305 SOON_BIN, // [SOON_BIN]
306 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_AND_ACTIVE_BIN]
307 EVENTUALLY_AND_ACTIVE_BIN, // [EVENTUALLY_BIN]
308 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_AND_ACTIVE_BIN]
309 AT_LAST_AND_ACTIVE_BIN, // [AT_LAST_BIN]
310 NEVER_BIN // [NEVER_BIN]
313 // Determine bin based on three categories of tiles: things we need now,
314 // things we need soon, and things we need eventually.
315 inline ManagedTileBin BinFromTilePriority(const TilePriority& prio) {
316 if (prio.priority_bin == TilePriority::NOW)
317 return NOW_BIN;
319 if (prio.priority_bin == TilePriority::SOON)
320 return SOON_BIN;
322 if (prio.distance_to_visible == std::numeric_limits<float>::infinity())
323 return NEVER_BIN;
325 return EVENTUALLY_BIN;
328 } // namespace
330 RasterTaskCompletionStats::RasterTaskCompletionStats()
331 : completed_count(0u), canceled_count(0u) {}
333 scoped_refptr<base::debug::ConvertableToTraceFormat>
334 RasterTaskCompletionStatsAsValue(const RasterTaskCompletionStats& stats) {
335 scoped_refptr<base::debug::TracedValue> state =
336 new base::debug::TracedValue();
337 state->SetInteger("completed_count", stats.completed_count);
338 state->SetInteger("canceled_count", stats.canceled_count);
339 return state;
342 // static
343 scoped_ptr<TileManager> TileManager::Create(
344 TileManagerClient* client,
345 base::SequencedTaskRunner* task_runner,
346 ResourcePool* resource_pool,
347 Rasterizer* rasterizer,
348 RenderingStatsInstrumentation* rendering_stats_instrumentation) {
349 return make_scoped_ptr(new TileManager(client,
350 task_runner,
351 resource_pool,
352 rasterizer,
353 rendering_stats_instrumentation));
356 TileManager::TileManager(
357 TileManagerClient* client,
358 base::SequencedTaskRunner* task_runner,
359 ResourcePool* resource_pool,
360 Rasterizer* rasterizer,
361 RenderingStatsInstrumentation* rendering_stats_instrumentation)
362 : client_(client),
363 task_runner_(task_runner),
364 resource_pool_(resource_pool),
365 rasterizer_(rasterizer),
366 prioritized_tiles_dirty_(false),
367 all_tiles_that_need_to_be_rasterized_have_memory_(true),
368 all_tiles_required_for_activation_have_memory_(true),
369 bytes_releasable_(0),
370 resources_releasable_(0),
371 ever_exceeded_memory_budget_(false),
372 rendering_stats_instrumentation_(rendering_stats_instrumentation),
373 did_initialize_visible_tile_(false),
374 did_check_for_completed_tasks_since_last_schedule_tasks_(true),
375 ready_to_activate_check_notifier_(
376 task_runner_,
377 base::Bind(&TileManager::CheckIfReadyToActivate,
378 base::Unretained(this))) {
379 rasterizer_->SetClient(this);
382 TileManager::~TileManager() {
383 // Reset global state and manage tiles. This should cause
384 // our memory usage to drop to zero.
385 global_state_ = GlobalStateThatImpactsTilePriority();
387 RasterTaskQueue empty;
388 rasterizer_->ScheduleTasks(&empty);
389 orphan_raster_tasks_.clear();
391 // This should finish all pending tasks and release any uninitialized
392 // resources.
393 rasterizer_->Shutdown();
394 rasterizer_->CheckForCompletedTasks();
396 prioritized_tiles_.Clear();
398 FreeResourcesForReleasedTiles();
399 CleanUpReleasedTiles();
401 DCHECK_EQ(0u, bytes_releasable_);
402 DCHECK_EQ(0u, resources_releasable_);
405 void TileManager::Release(Tile* tile) {
406 DCHECK(TilePriority() == tile->combined_priority());
408 prioritized_tiles_dirty_ = true;
409 released_tiles_.push_back(tile);
412 void TileManager::DidChangeTilePriority(Tile* tile) {
413 prioritized_tiles_dirty_ = true;
416 bool TileManager::ShouldForceTasksRequiredForActivationToComplete() const {
417 return global_state_.tree_priority != SMOOTHNESS_TAKES_PRIORITY;
420 void TileManager::FreeResourcesForReleasedTiles() {
421 for (std::vector<Tile*>::iterator it = released_tiles_.begin();
422 it != released_tiles_.end();
423 ++it) {
424 Tile* tile = *it;
425 FreeResourcesForTile(tile);
429 void TileManager::CleanUpReleasedTiles() {
430 // Make sure |prioritized_tiles_| doesn't contain any of the tiles
431 // we're about to delete.
432 DCHECK(prioritized_tiles_.IsEmpty());
434 std::vector<Tile*>::iterator it = released_tiles_.begin();
435 while (it != released_tiles_.end()) {
436 Tile* tile = *it;
438 if (tile->HasRasterTask()) {
439 ++it;
440 continue;
443 DCHECK(!tile->HasResources());
444 DCHECK(tiles_.find(tile->id()) != tiles_.end());
445 tiles_.erase(tile->id());
447 LayerCountMap::iterator layer_it =
448 used_layer_counts_.find(tile->layer_id());
449 DCHECK_GT(layer_it->second, 0);
450 if (--layer_it->second == 0) {
451 used_layer_counts_.erase(layer_it);
452 image_decode_tasks_.erase(tile->layer_id());
455 delete tile;
456 it = released_tiles_.erase(it);
460 void TileManager::UpdatePrioritizedTileSetIfNeeded() {
461 if (!prioritized_tiles_dirty_)
462 return;
464 prioritized_tiles_.Clear();
466 FreeResourcesForReleasedTiles();
467 CleanUpReleasedTiles();
469 GetTilesWithAssignedBins(&prioritized_tiles_);
470 prioritized_tiles_dirty_ = false;
473 void TileManager::DidFinishRunningTasks() {
474 TRACE_EVENT0("cc", "TileManager::DidFinishRunningTasks");
476 bool memory_usage_above_limit = resource_pool_->total_memory_usage_bytes() >
477 global_state_.soft_memory_limit_in_bytes;
479 // When OOM, keep re-assigning memory until we reach a steady state
480 // where top-priority tiles are initialized.
481 if (all_tiles_that_need_to_be_rasterized_have_memory_ &&
482 !memory_usage_above_limit)
483 return;
485 rasterizer_->CheckForCompletedTasks();
486 did_check_for_completed_tasks_since_last_schedule_tasks_ = true;
488 TileVector tiles_that_need_to_be_rasterized;
489 AssignGpuMemoryToTiles(&prioritized_tiles_,
490 &tiles_that_need_to_be_rasterized);
492 // |tiles_that_need_to_be_rasterized| will be empty when we reach a
493 // steady memory state. Keep scheduling tasks until we reach this state.
494 if (!tiles_that_need_to_be_rasterized.empty()) {
495 ScheduleTasks(tiles_that_need_to_be_rasterized);
496 return;
499 FreeResourcesForReleasedTiles();
501 resource_pool_->ReduceResourceUsage();
503 // We don't reserve memory for required-for-activation tiles during
504 // accelerated gestures, so we just postpone activation when we don't
505 // have these tiles, and activate after the accelerated gesture.
506 bool allow_rasterize_on_demand =
507 global_state_.tree_priority != SMOOTHNESS_TAKES_PRIORITY;
509 // Use on-demand raster for any required-for-activation tiles that have not
510 // been assigned memory after reaching a steady memory state. This
511 // ensures that we activate even when OOM.
512 for (TileMap::iterator it = tiles_.begin(); it != tiles_.end(); ++it) {
513 Tile* tile = it->second;
514 ManagedTileState& mts = tile->managed_state();
515 ManagedTileState::TileVersion& tile_version =
516 mts.tile_versions[mts.raster_mode];
518 if (tile->required_for_activation() && !tile_version.IsReadyToDraw()) {
519 // If we can't raster on demand, give up early (and don't activate).
520 if (!allow_rasterize_on_demand)
521 return;
523 tile_version.set_rasterize_on_demand();
524 client_->NotifyTileStateChanged(tile);
528 DCHECK(IsReadyToActivate());
529 ready_to_activate_check_notifier_.Schedule();
532 void TileManager::DidFinishRunningTasksRequiredForActivation() {
533 // This is only a true indication that all tiles required for
534 // activation are initialized when no tiles are OOM. Otherwise, we need to
535 // wait for DidFinishRunningTasks() to be called, try to re-assign
536 // memory and, in the worst case, use on-demand raster when tiles
537 // required for activation are OOM.
538 if (!all_tiles_required_for_activation_have_memory_)
539 return;
541 ready_to_activate_check_notifier_.Schedule();
544 void TileManager::GetTilesWithAssignedBins(PrioritizedTileSet* tiles) {
545 TRACE_EVENT0("cc", "TileManager::GetTilesWithAssignedBins");
547 const TileMemoryLimitPolicy memory_policy = global_state_.memory_limit_policy;
548 const TreePriority tree_priority = global_state_.tree_priority;
550 // For each tree, bin into different categories of tiles.
551 for (TileMap::const_iterator it = tiles_.begin(); it != tiles_.end(); ++it) {
552 Tile* tile = it->second;
553 ManagedTileState& mts = tile->managed_state();
555 const ManagedTileState::TileVersion& tile_version =
556 tile->GetTileVersionForDrawing();
557 bool tile_is_ready_to_draw = tile_version.IsReadyToDraw();
558 bool tile_is_active = tile_is_ready_to_draw ||
559 mts.tile_versions[mts.raster_mode].raster_task_;
561 // Get the active priority and bin.
562 TilePriority active_priority = tile->priority(ACTIVE_TREE);
563 ManagedTileBin active_bin = BinFromTilePriority(active_priority);
565 // Get the pending priority and bin.
566 TilePriority pending_priority = tile->priority(PENDING_TREE);
567 ManagedTileBin pending_bin = BinFromTilePriority(pending_priority);
569 bool pending_is_low_res = pending_priority.resolution == LOW_RESOLUTION;
570 bool pending_is_non_ideal =
571 pending_priority.resolution == NON_IDEAL_RESOLUTION;
572 bool active_is_non_ideal =
573 active_priority.resolution == NON_IDEAL_RESOLUTION;
575 // Adjust bin state based on whether the tile is ready to draw.
576 active_bin = kBinReadyToDrawMap[tile_is_ready_to_draw][active_bin];
577 pending_bin = kBinReadyToDrawMap[tile_is_ready_to_draw][pending_bin];
579 // Adjust bin state based on whether the tile is active.
580 active_bin = kBinIsActiveMap[tile_is_active][active_bin];
581 pending_bin = kBinIsActiveMap[tile_is_active][pending_bin];
583 // We never want to paint new non-ideal tiles, as we always have
584 // a high-res tile covering that content (paint that instead).
585 if (!tile_is_ready_to_draw && active_is_non_ideal)
586 active_bin = NEVER_BIN;
587 if (!tile_is_ready_to_draw && pending_is_non_ideal)
588 pending_bin = NEVER_BIN;
590 ManagedTileBin tree_bin[NUM_TREES];
591 tree_bin[ACTIVE_TREE] = kBinPolicyMap[memory_policy][active_bin];
592 tree_bin[PENDING_TREE] = kBinPolicyMap[memory_policy][pending_bin];
594 // Adjust pending bin state for low res tiles. This prevents pending tree
595 // low-res tiles from being initialized before high-res tiles.
596 if (pending_is_low_res)
597 tree_bin[PENDING_TREE] = std::max(tree_bin[PENDING_TREE], EVENTUALLY_BIN);
599 TilePriority tile_priority;
600 switch (tree_priority) {
601 case SAME_PRIORITY_FOR_BOTH_TREES:
602 mts.bin = std::min(tree_bin[ACTIVE_TREE], tree_bin[PENDING_TREE]);
603 tile_priority = tile->combined_priority();
604 break;
605 case SMOOTHNESS_TAKES_PRIORITY:
606 mts.bin = tree_bin[ACTIVE_TREE];
607 tile_priority = active_priority;
608 break;
609 case NEW_CONTENT_TAKES_PRIORITY:
610 mts.bin = tree_bin[PENDING_TREE];
611 tile_priority = pending_priority;
612 break;
613 default:
614 NOTREACHED();
617 // Bump up the priority if we determined it's NEVER_BIN on one tree,
618 // but is still required on the other tree.
619 bool is_in_never_bin_on_both_trees = tree_bin[ACTIVE_TREE] == NEVER_BIN &&
620 tree_bin[PENDING_TREE] == NEVER_BIN;
622 if (mts.bin == NEVER_BIN && !is_in_never_bin_on_both_trees)
623 mts.bin = tile_is_active ? AT_LAST_AND_ACTIVE_BIN : AT_LAST_BIN;
625 mts.resolution = tile_priority.resolution;
626 mts.priority_bin = tile_priority.priority_bin;
627 mts.distance_to_visible = tile_priority.distance_to_visible;
628 mts.required_for_activation = tile_priority.required_for_activation;
630 mts.visible_and_ready_to_draw =
631 tree_bin[ACTIVE_TREE] == NOW_AND_READY_TO_DRAW_BIN;
633 // Tiles that are required for activation shouldn't be in NEVER_BIN unless
634 // smoothness takes priority or memory policy allows nothing to be
635 // initialized.
636 DCHECK(!mts.required_for_activation || mts.bin != NEVER_BIN ||
637 tree_priority == SMOOTHNESS_TAKES_PRIORITY ||
638 memory_policy == ALLOW_NOTHING);
640 // If the tile is in NEVER_BIN and it does not have an active task, then we
641 // can release the resources early. If it does have the task, however, we
642 // should keep it in the prioritized tile set to ensure that
643 // AssignGpuMemoryToTiles() can visit it.
644 if (mts.bin == NEVER_BIN &&
645 !mts.tile_versions[mts.raster_mode].raster_task_) {
646 FreeResourcesForTileAndNotifyClientIfTileWasReadyToDraw(tile);
647 continue;
650 // Insert the tile into a priority set.
651 tiles->InsertTile(tile, mts.bin);
655 void TileManager::ManageTiles(const GlobalStateThatImpactsTilePriority& state) {
656 TRACE_EVENT0("cc", "TileManager::ManageTiles");
658 // Update internal state.
659 if (state != global_state_) {
660 global_state_ = state;
661 prioritized_tiles_dirty_ = true;
664 // We need to call CheckForCompletedTasks() once in-between each call
665 // to ScheduleTasks() to prevent canceled tasks from being scheduled.
666 if (!did_check_for_completed_tasks_since_last_schedule_tasks_) {
667 rasterizer_->CheckForCompletedTasks();
668 did_check_for_completed_tasks_since_last_schedule_tasks_ = true;
671 UpdatePrioritizedTileSetIfNeeded();
673 TileVector tiles_that_need_to_be_rasterized;
674 AssignGpuMemoryToTiles(&prioritized_tiles_,
675 &tiles_that_need_to_be_rasterized);
677 // Finally, schedule rasterizer tasks.
678 ScheduleTasks(tiles_that_need_to_be_rasterized);
680 TRACE_EVENT_INSTANT1("cc",
681 "DidManage",
682 TRACE_EVENT_SCOPE_THREAD,
683 "state",
684 BasicStateAsValue());
686 TRACE_COUNTER_ID1("cc",
687 "unused_memory_bytes",
688 this,
689 resource_pool_->total_memory_usage_bytes() -
690 resource_pool_->acquired_memory_usage_bytes());
693 bool TileManager::UpdateVisibleTiles() {
694 TRACE_EVENT0("cc", "TileManager::UpdateVisibleTiles");
696 rasterizer_->CheckForCompletedTasks();
697 did_check_for_completed_tasks_since_last_schedule_tasks_ = true;
699 TRACE_EVENT_INSTANT1(
700 "cc",
701 "DidUpdateVisibleTiles",
702 TRACE_EVENT_SCOPE_THREAD,
703 "stats",
704 RasterTaskCompletionStatsAsValue(update_visible_tiles_stats_));
705 update_visible_tiles_stats_ = RasterTaskCompletionStats();
707 bool did_initialize_visible_tile = did_initialize_visible_tile_;
708 did_initialize_visible_tile_ = false;
709 return did_initialize_visible_tile;
712 scoped_refptr<base::debug::ConvertableToTraceFormat>
713 TileManager::BasicStateAsValue() const {
714 scoped_refptr<base::debug::TracedValue> value =
715 new base::debug::TracedValue();
716 BasicStateAsValueInto(value.get());
717 return value;
720 void TileManager::BasicStateAsValueInto(base::debug::TracedValue* state) const {
721 state->SetInteger("tile_count", tiles_.size());
722 state->BeginDictionary("global_state");
723 global_state_.AsValueInto(state);
724 state->EndDictionary();
727 void TileManager::AllTilesAsValueInto(base::debug::TracedValue* state) const {
728 for (TileMap::const_iterator it = tiles_.begin(); it != tiles_.end(); ++it) {
729 state->BeginDictionary();
730 it->second->AsValueInto(state);
731 state->EndDictionary();
735 void TileManager::AssignGpuMemoryToTiles(
736 PrioritizedTileSet* tiles,
737 TileVector* tiles_that_need_to_be_rasterized) {
738 TRACE_EVENT0("cc", "TileManager::AssignGpuMemoryToTiles");
740 // Maintain the list of released resources that can potentially be re-used
741 // or deleted.
742 // If this operation becomes too expensive, only do it after some
743 // resource(s) have been returned. Note that in that case, we would also need
744 // to invalidate when releasing a resource from the pool.
745 resource_pool_->CheckBusyResources();
747 // Now give memory out to the tiles until we're out, and build
748 // the needs-to-be-rasterized queue.
749 all_tiles_that_need_to_be_rasterized_have_memory_ = true;
750 all_tiles_required_for_activation_have_memory_ = true;
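// The allocatable budget is the memory we could reclaim from tiles we are
// already tracking (bytes_releasable_ / resources_releasable_) plus whatever
// headroom remains under the soft/hard limits beyond what the pool has
// already acquired.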
752 // Cast to prevent overflow.
753 int64 soft_bytes_available =
754 static_cast<int64>(bytes_releasable_) +
755 static_cast<int64>(global_state_.soft_memory_limit_in_bytes) -
756 static_cast<int64>(resource_pool_->acquired_memory_usage_bytes());
757 int64 hard_bytes_available =
758 static_cast<int64>(bytes_releasable_) +
759 static_cast<int64>(global_state_.hard_memory_limit_in_bytes) -
760 static_cast<int64>(resource_pool_->acquired_memory_usage_bytes());
761 int resources_available = resources_releasable_ +
762 global_state_.num_resources_limit -
763 resource_pool_->acquired_resource_count();
764 size_t soft_bytes_allocatable =
765 std::max(static_cast<int64>(0), soft_bytes_available);
766 size_t hard_bytes_allocatable =
767 std::max(static_cast<int64>(0), hard_bytes_available);
768 size_t resources_allocatable = std::max(0, resources_available);
770 size_t bytes_that_exceeded_memory_budget = 0;
771 size_t soft_bytes_left = soft_bytes_allocatable;
772 size_t hard_bytes_left = hard_bytes_allocatable;
774 size_t resources_left = resources_allocatable;
775 bool oomed_soft = false;
776 bool oomed_hard = false;
777 bool have_hit_soft_memory = false; // Soft memory comes after hard.
779 unsigned schedule_priority = 1u;
780 for (PrioritizedTileSet::Iterator it(tiles, true); it; ++it) {
781 Tile* tile = *it;
782 ManagedTileState& mts = tile->managed_state();
784 mts.scheduled_priority = schedule_priority++;
786 mts.raster_mode = tile->DetermineOverallRasterMode();
788 ManagedTileState::TileVersion& tile_version =
789 mts.tile_versions[mts.raster_mode];
791 // If this tile doesn't need a resource, then nothing to do.
792 if (!tile_version.requires_resource())
793 continue;
795 // If the tile is not needed, free it up.
796 if (mts.bin == NEVER_BIN) {
797 FreeResourcesForTileAndNotifyClientIfTileWasReadyToDraw(tile);
798 continue;
801 const bool tile_uses_hard_limit = mts.bin <= NOW_BIN;
802 const size_t bytes_if_allocated = BytesConsumedIfAllocated(tile);
803 const size_t tile_bytes_left =
804 (tile_uses_hard_limit) ? hard_bytes_left : soft_bytes_left;
806 // Hard-limit is reserved for tiles that would cause a calamity
807 // if they were to go away, so by definition they are the highest
808 // priority memory, and must be at the front of the list.
809 DCHECK(!(have_hit_soft_memory && tile_uses_hard_limit));
810 have_hit_soft_memory |= !tile_uses_hard_limit;
812 size_t tile_bytes = 0;
813 size_t tile_resources = 0;
815 // Holding an already-allocated resource also counts against the budget.
816 for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
817 if (mts.tile_versions[mode].resource_) {
818 tile_bytes += bytes_if_allocated;
819 tile_resources++;
823 // Allow lower priority tiles with initialized resources to keep
824 // their memory by only assigning memory to new raster tasks if
825 // they can be scheduled.
826 bool reached_scheduled_raster_tasks_limit =
827 tiles_that_need_to_be_rasterized->size() >= kScheduledRasterTasksLimit;
828 if (!reached_scheduled_raster_tasks_limit) {
829 // If we don't have the required version and it's not in flight,
830 // then we'll have to pay to create a new task.
831 if (!tile_version.resource_ && !tile_version.raster_task_) {
832 tile_bytes += bytes_if_allocated;
833 tile_resources++;
837 // Tile is OOM.
838 if (tile_bytes > tile_bytes_left || tile_resources > resources_left) {
839 FreeResourcesForTileAndNotifyClientIfTileWasReadyToDraw(tile);
841 // This tile was already on screen and now its resources have been
842 // released. In order to prevent checkerboarding, set this tile as
843 // rasterize on demand immediately.
844 if (mts.visible_and_ready_to_draw)
845 tile_version.set_rasterize_on_demand();
847 oomed_soft = true;
848 if (tile_uses_hard_limit) {
849 oomed_hard = true;
850 bytes_that_exceeded_memory_budget += tile_bytes;
852 } else {
853 resources_left -= tile_resources;
854 hard_bytes_left -= tile_bytes;
855 soft_bytes_left =
856 (soft_bytes_left > tile_bytes) ? soft_bytes_left - tile_bytes : 0;
857 if (tile_version.resource_)
858 continue;
861 DCHECK(!tile_version.resource_);
863 // Tile shouldn't be rasterized if |tiles_that_need_to_be_rasterized|
864 // has reached its limit or we've failed to assign gpu memory to this
865 // or any higher priority tile. Preventing tiles that fit into the memory
866 // budget from being rasterized when a higher priority tile is OOM is
867 // important for two reasons:
868 // 1. Tile size should not impact raster priority.
869 // 2. Tiles with an existing raster task could otherwise incorrectly
870 // be added as they are not affected by |bytes_allocatable|.
871 bool can_schedule_tile =
872 !oomed_soft && !reached_scheduled_raster_tasks_limit;
874 if (!can_schedule_tile) {
875 all_tiles_that_need_to_be_rasterized_have_memory_ = false;
876 if (tile->required_for_activation())
877 all_tiles_required_for_activation_have_memory_ = false;
878 it.DisablePriorityOrdering();
879 continue;
882 tiles_that_need_to_be_rasterized->push_back(tile);
885 // OOM reporting uses the hard limit; soft OOM is normal, depending on the limit.
886 ever_exceeded_memory_budget_ |= oomed_hard;
887 if (ever_exceeded_memory_budget_) {
888 TRACE_COUNTER_ID2("cc",
889 "over_memory_budget",
890 this,
891 "budget",
892 global_state_.hard_memory_limit_in_bytes,
893 "over",
894 bytes_that_exceeded_memory_budget);
896 UMA_HISTOGRAM_BOOLEAN("TileManager.ExceededMemoryBudget", oomed_hard);
897 memory_stats_from_last_assign_.total_budget_in_bytes =
898 global_state_.hard_memory_limit_in_bytes;
899 memory_stats_from_last_assign_.bytes_allocated =
900 hard_bytes_allocatable - hard_bytes_left;
901 memory_stats_from_last_assign_.bytes_unreleasable =
902 resource_pool_->acquired_memory_usage_bytes() - bytes_releasable_;
903 memory_stats_from_last_assign_.bytes_over = bytes_that_exceeded_memory_budget;
906 void TileManager::FreeResourceForTile(Tile* tile, RasterMode mode) {
907 ManagedTileState& mts = tile->managed_state();
908 if (mts.tile_versions[mode].resource_) {
909 resource_pool_->ReleaseResource(mts.tile_versions[mode].resource_.Pass());
911 DCHECK_GE(bytes_releasable_, BytesConsumedIfAllocated(tile));
912 DCHECK_GE(resources_releasable_, 1u);
914 bytes_releasable_ -= BytesConsumedIfAllocated(tile);
915 --resources_releasable_;
919 void TileManager::FreeResourcesForTile(Tile* tile) {
920 for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
921 FreeResourceForTile(tile, static_cast<RasterMode>(mode));
925 void TileManager::FreeUnusedResourcesForTile(Tile* tile) {
926 DCHECK(tile->IsReadyToDraw());
927 ManagedTileState& mts = tile->managed_state();
928 RasterMode used_mode = LOW_QUALITY_RASTER_MODE;
929 for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
930 if (mts.tile_versions[mode].IsReadyToDraw()) {
931 used_mode = static_cast<RasterMode>(mode);
932 break;
936 for (int mode = 0; mode < NUM_RASTER_MODES; ++mode) {
937 if (mode != used_mode)
938 FreeResourceForTile(tile, static_cast<RasterMode>(mode));
942 void TileManager::FreeResourcesForTileAndNotifyClientIfTileWasReadyToDraw(
943 Tile* tile) {
944 bool was_ready_to_draw = tile->IsReadyToDraw();
945 FreeResourcesForTile(tile);
946 if (was_ready_to_draw)
947 client_->NotifyTileStateChanged(tile);
950 void TileManager::ScheduleTasks(
951 const TileVector& tiles_that_need_to_be_rasterized) {
952 TRACE_EVENT1("cc",
953 "TileManager::ScheduleTasks",
954 "count",
955 tiles_that_need_to_be_rasterized.size());
957 DCHECK(did_check_for_completed_tasks_since_last_schedule_tasks_);
959 raster_queue_.Reset();
961 // Build a new task queue containing all tasks currently needed. Tasks
962 // are added in order of priority, highest priority task first.
963 for (TileVector::const_iterator it = tiles_that_need_to_be_rasterized.begin();
964 it != tiles_that_need_to_be_rasterized.end();
965 ++it) {
966 Tile* tile = *it;
967 ManagedTileState& mts = tile->managed_state();
968 ManagedTileState::TileVersion& tile_version =
969 mts.tile_versions[mts.raster_mode];
971 DCHECK(tile_version.requires_resource());
972 DCHECK(!tile_version.resource_);
974 if (!tile_version.raster_task_)
975 tile_version.raster_task_ = CreateRasterTask(tile);
977 raster_queue_.items.push_back(RasterTaskQueue::Item(
978 tile_version.raster_task_.get(), tile->required_for_activation()));
979 raster_queue_.required_for_activation_count +=
980 tile->required_for_activation();
983 // We must reduce the amount of unused resources before calling
984 // ScheduleTasks to prevent usage from rising above limits.
985 resource_pool_->ReduceResourceUsage();
987 // Schedule running of |raster_queue_|. This replaces any previously
988 // scheduled tasks and effectively cancels all tasks not present
989 // in |raster_queue_|.
990 rasterizer_->ScheduleTasks(&raster_queue_);
992 // It's now safe to clean up orphan tasks, as the raster worker pool is not
993 // allowed to keep around unreferenced raster tasks after ScheduleTasks() has
994 // been called.
995 orphan_raster_tasks_.clear();
997 did_check_for_completed_tasks_since_last_schedule_tasks_ = false;
1000 scoped_refptr<ImageDecodeTask> TileManager::CreateImageDecodeTask(
1001 Tile* tile,
1002 SkPixelRef* pixel_ref) {
1003 return make_scoped_refptr(new ImageDecodeTaskImpl(
1004 pixel_ref,
1005 tile->layer_id(),
1006 rendering_stats_instrumentation_,
1007 base::Bind(&TileManager::OnImageDecodeTaskCompleted,
1008 base::Unretained(this),
1009 tile->layer_id(),
1010 base::Unretained(pixel_ref))));
1013 scoped_refptr<RasterTask> TileManager::CreateRasterTask(Tile* tile) {
1014 ManagedTileState& mts = tile->managed_state();
1016 scoped_ptr<ScopedResource> resource =
1017 resource_pool_->AcquireResource(tile->size());
1018 const ScopedResource* const_resource = resource.get();
1020 // Create and queue all image decode tasks that this tile depends on.
1021 ImageDecodeTask::Vector decode_tasks;
1022 PixelRefTaskMap& existing_pixel_refs = image_decode_tasks_[tile->layer_id()];
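// Decode tasks are shared per layer and keyed by the pixel ref's generation
// ID, so a pixel ref that appears in multiple tiles of the same layer is only
// decoded once.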
1023 for (PicturePileImpl::PixelRefIterator iter(
1024 tile->content_rect(), tile->contents_scale(), tile->picture_pile());
1025 iter;
1026 ++iter) {
1027 SkPixelRef* pixel_ref = *iter;
1028 uint32_t id = pixel_ref->getGenerationID();
1030 // Append existing image decode task if available.
1031 PixelRefTaskMap::iterator decode_task_it = existing_pixel_refs.find(id);
1032 if (decode_task_it != existing_pixel_refs.end()) {
1033 decode_tasks.push_back(decode_task_it->second);
1034 continue;
1037 // Create and append new image decode task for this pixel ref.
1038 scoped_refptr<ImageDecodeTask> decode_task =
1039 CreateImageDecodeTask(tile, pixel_ref);
1040 decode_tasks.push_back(decode_task);
1041 existing_pixel_refs[id] = decode_task;
1044 return make_scoped_refptr(
1045 new RasterTaskImpl(const_resource,
1046 tile->picture_pile(),
1047 tile->content_rect(),
1048 tile->contents_scale(),
1049 mts.raster_mode,
1050 mts.resolution,
1051 tile->layer_id(),
1052 static_cast<const void*>(tile),
1053 tile->source_frame_number(),
1054 tile->use_picture_analysis(),
1055 rendering_stats_instrumentation_,
1056 base::Bind(&TileManager::OnRasterTaskCompleted,
1057 base::Unretained(this),
1058 tile->id(),
1059 base::Passed(&resource),
1060 mts.raster_mode),
1061 &decode_tasks));
1064 void TileManager::OnImageDecodeTaskCompleted(int layer_id,
1065 SkPixelRef* pixel_ref,
1066 bool was_canceled) {
1067 // If the task was canceled, we need to clean it up
1068 // from |image_decode_tasks_|.
1069 if (!was_canceled)
1070 return;
1072 LayerPixelRefTaskMap::iterator layer_it = image_decode_tasks_.find(layer_id);
1073 if (layer_it == image_decode_tasks_.end())
1074 return;
1076 PixelRefTaskMap& pixel_ref_tasks = layer_it->second;
1077 PixelRefTaskMap::iterator task_it =
1078 pixel_ref_tasks.find(pixel_ref->getGenerationID());
1080 if (task_it != pixel_ref_tasks.end())
1081 pixel_ref_tasks.erase(task_it);
1084 void TileManager::OnRasterTaskCompleted(
1085 Tile::Id tile_id,
1086 scoped_ptr<ScopedResource> resource,
1087 RasterMode raster_mode,
1088 const PicturePileImpl::Analysis& analysis,
1089 bool was_canceled) {
1090 DCHECK(tiles_.find(tile_id) != tiles_.end());
1092 Tile* tile = tiles_[tile_id];
1093 ManagedTileState& mts = tile->managed_state();
1094 ManagedTileState::TileVersion& tile_version = mts.tile_versions[raster_mode];
1095 DCHECK(tile_version.raster_task_);
1096 orphan_raster_tasks_.push_back(tile_version.raster_task_);
1097 tile_version.raster_task_ = NULL;
1099 if (was_canceled) {
1100 ++update_visible_tiles_stats_.canceled_count;
1101 resource_pool_->ReleaseResource(resource.Pass());
1102 return;
1105 ++update_visible_tiles_stats_.completed_count;
1107 if (analysis.is_solid_color) {
1108 tile_version.set_solid_color(analysis.solid_color);
1109 resource_pool_->ReleaseResource(resource.Pass());
1110 } else {
1111 tile_version.set_use_resource();
1112 tile_version.resource_ = resource.Pass();
1114 bytes_releasable_ += BytesConsumedIfAllocated(tile);
1115 ++resources_releasable_;
1118 FreeUnusedResourcesForTile(tile);
1119 if (tile->priority(ACTIVE_TREE).distance_to_visible == 0.f)
1120 did_initialize_visible_tile_ = true;
1122 client_->NotifyTileStateChanged(tile);
1125 scoped_refptr<Tile> TileManager::CreateTile(PicturePileImpl* picture_pile,
1126 const gfx::Size& tile_size,
1127 const gfx::Rect& content_rect,
1128 const gfx::Rect& opaque_rect,
1129 float contents_scale,
1130 int layer_id,
1131 int source_frame_number,
1132 int flags) {
1133 scoped_refptr<Tile> tile = make_scoped_refptr(new Tile(this,
1134 picture_pile,
1135 tile_size,
1136 content_rect,
1137 opaque_rect,
1138 contents_scale,
1139 layer_id,
1140 source_frame_number,
1141 flags));
1142 DCHECK(tiles_.find(tile->id()) == tiles_.end());
1144 tiles_[tile->id()] = tile;
1145 used_layer_counts_[tile->layer_id()]++;
1146 prioritized_tiles_dirty_ = true;
1147 return tile;
1150 void TileManager::SetRasterizerForTesting(Rasterizer* rasterizer) {
1151 rasterizer_ = rasterizer;
1152 rasterizer_->SetClient(this);
1155 bool TileManager::IsReadyToActivate() const {
1156 const std::vector<PictureLayerImpl*>& layers = client_->GetPictureLayers();
1158 for (std::vector<PictureLayerImpl*>::const_iterator it = layers.begin();
1159 it != layers.end();
1160 ++it) {
1161 if (!(*it)->AllTilesRequiredForActivationAreReadyToDraw())
1162 return false;
1165 return true;
1168 void TileManager::CheckIfReadyToActivate() {
1169 TRACE_EVENT0("cc", "TileManager::CheckIfReadyToActivate");
1171 rasterizer_->CheckForCompletedTasks();
1172 did_check_for_completed_tasks_since_last_schedule_tasks_ = true;
1174 if (IsReadyToActivate())
1175 client_->NotifyReadyToActivate();
1178 } // namespace cc