cc/layers/delegated_renderer_layer_impl.cc

// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/layers/delegated_renderer_layer_impl.h"

#include <algorithm>
#include <utility>

#include "base/bind.h"
#include "base/containers/hash_tables.h"
#include "cc/base/math_util.h"
#include "cc/layers/append_quads_data.h"
#include "cc/layers/render_pass_sink.h"
#include "cc/output/delegated_frame_data.h"
#include "cc/quads/render_pass_draw_quad.h"
#include "cc/quads/solid_color_draw_quad.h"
#include "cc/trees/layer_tree_impl.h"
#include "cc/trees/occlusion.h"
#include "ui/gfx/geometry/rect_conversions.h"

namespace cc {

DelegatedRendererLayerImpl::DelegatedRendererLayerImpl(LayerTreeImpl* tree_impl,
                                                       int id)
    : LayerImpl(tree_impl, id),
      have_render_passes_to_push_(false),
      inverse_device_scale_factor_(1.0f),
      child_id_(0),
      own_child_id_(false) {
}

DelegatedRendererLayerImpl::~DelegatedRendererLayerImpl() {
  ClearRenderPasses();
  ClearChildId();
}

bool DelegatedRendererLayerImpl::HasDelegatedContent() const { return true; }

bool DelegatedRendererLayerImpl::HasContributingDelegatedRenderPasses() const {
  // The root RenderPass for the layer is merged with its target
  // RenderPass in each frame. So we only have extra RenderPasses
  // to merge when we have a non-root RenderPass present.
  return render_passes_in_draw_order_.size() > 1;
}

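// Maps a resource id from the child (delegating) compositor to the
// corresponding id known to the parent ResourceProvider, recording each id
// seen in |resources_in_frame|. A frame that references a resource the parent
// has never received is flagged via |invalid_frame|.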
static ResourceProvider::ResourceId ResourceRemapHelper(
    bool* invalid_frame,
    const ResourceProvider::ResourceIdMap& child_to_parent_map,
    ResourceProvider::ResourceIdSet* resources_in_frame,
    ResourceProvider::ResourceId id) {
  ResourceProvider::ResourceIdMap::const_iterator it =
      child_to_parent_map.find(id);
  if (it == child_to_parent_map.end()) {
    *invalid_frame = true;
    return 0;
  }

  DCHECK_EQ(it->first, id);
  ResourceProvider::ResourceId remapped_id = it->second;
  resources_in_frame->insert(id);
  return remapped_id;
}

void DelegatedRendererLayerImpl::PushPropertiesTo(LayerImpl* layer) {
  LayerImpl::PushPropertiesTo(layer);

  DelegatedRendererLayerImpl* delegated_layer =
      static_cast<DelegatedRendererLayerImpl*>(layer);

  // If we have a new child_id to give to the active layer, it should
  // have already deleted its old child_id.
  DCHECK(delegated_layer->child_id_ == 0 ||
         delegated_layer->child_id_ == child_id_);
  delegated_layer->inverse_device_scale_factor_ = inverse_device_scale_factor_;
  delegated_layer->child_id_ = child_id_;
  delegated_layer->own_child_id_ = true;
  own_child_id_ = false;

  if (have_render_passes_to_push_) {
    // This passes ownership of the render passes to the active tree.
    delegated_layer->SetRenderPasses(&render_passes_in_draw_order_);
    // Once resources are on the active tree, give them to the ResourceProvider
    // and release unused resources from the old frame.
    delegated_layer->TakeOwnershipOfResourcesIfOnActiveTree(resources_);
    DCHECK(render_passes_in_draw_order_.empty());
    have_render_passes_to_push_ = false;
  }

  // This is just a copy for testing, since resources are added to the
  // ResourceProvider in the pending tree.
  delegated_layer->resources_ = resources_;
}

void DelegatedRendererLayerImpl::CreateChildIdIfNeeded(
    const ReturnCallback& return_callback) {
  if (child_id_)
    return;

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  child_id_ = resource_provider->CreateChild(return_callback);
  own_child_id_ = true;
}

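// Installs a new delegated frame: the frame's resources are received into the
// ResourceProvider, quads are rewritten to reference the parent's resource
// ids, and the frame's render passes replace the previously held ones. A
// frame that references unknown resources is rejected.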
void DelegatedRendererLayerImpl::SetFrameData(
    const DelegatedFrameData* frame_data,
    const gfx::RectF& damage_in_frame) {
  DCHECK(child_id_) << "CreateChildIdIfNeeded must be called first.";
  DCHECK(frame_data);
  DCHECK(!frame_data->render_pass_list.empty());
  // A frame with an empty root render pass is invalid.
  DCHECK(!frame_data->render_pass_list.back()->output_rect.IsEmpty());

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  const ResourceProvider::ResourceIdMap& resource_map =
      resource_provider->GetChildToParentMap(child_id_);

  resource_provider->ReceiveFromChild(child_id_, frame_data->resource_list);

  RenderPassList render_pass_list;
  RenderPass::CopyAll(frame_data->render_pass_list, &render_pass_list);

  bool invalid_frame = false;
  ResourceProvider::ResourceIdSet resources_in_frame;
  size_t reserve_size = frame_data->resource_list.size();
#if defined(COMPILER_MSVC)
  resources_in_frame.reserve(reserve_size);
#elif defined(COMPILER_GCC)
  // Pre-standard hash-tables only implement resize, which behaves similarly
  // to reserve for these keys. Resizing to 0 may also be broken (particularly
  // on stlport).
  // TODO(jbauman): Replace with reserve when C++11 is supported everywhere.
  if (reserve_size)
    resources_in_frame.resize(reserve_size);
#endif
  DrawQuad::ResourceIteratorCallback remap_resources_to_parent_callback =
      base::Bind(&ResourceRemapHelper,
                 &invalid_frame,
                 resource_map,
                 &resources_in_frame);
  for (const auto& pass : render_pass_list) {
    for (const auto& quad : pass->quad_list)
      quad->IterateResources(remap_resources_to_parent_callback);
  }

  if (invalid_frame) {
    // Declare that we are still using the last frame's resources. This drops
    // ownership of any invalid resources, keeping only those in use by the
    // active tree.
    resource_provider->DeclareUsedResourcesFromChild(child_id_, resources_);
    return;
  }

  // Save the new frame's resources, but don't give them to the
  // ResourceProvider until they are active, since the resources on the active
  // tree will still be used and we don't want to return them early.
  resources_.swap(resources_in_frame);
  TakeOwnershipOfResourcesIfOnActiveTree(resources_);

  inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor;
  // Display size is already set so we can compute what the damage rect
  // will be in layer space. The damage may exceed the visible portion of
  // the frame, so intersect the damage with the layer's bounds.
  RenderPass* new_root_pass = render_pass_list.back();
  gfx::Size frame_size = new_root_pass->output_rect.size();
  gfx::RectF damage_in_layer = damage_in_frame;
  damage_in_layer.Scale(inverse_device_scale_factor_);
  SetUpdateRect(gfx::IntersectRects(
      gfx::UnionRects(update_rect(), gfx::ToEnclosingRect(damage_in_layer)),
      gfx::Rect(bounds())));

  SetRenderPasses(&render_pass_list);
  have_render_passes_to_push_ = true;
}

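// Declares |resources| as in use, but only once this layer is on the active
// tree; before activation the active tree's resources must not be returned to
// the child early.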
void DelegatedRendererLayerImpl::TakeOwnershipOfResourcesIfOnActiveTree(
    const ResourceProvider::ResourceIdSet& resources) {
  DCHECK(child_id_);
  if (!layer_tree_impl()->IsActiveTree())
    return;
  layer_tree_impl()->resource_provider()->DeclareUsedResourcesFromChild(
      child_id_, resources);
}

void DelegatedRendererLayerImpl::SetRenderPasses(
    RenderPassList* render_passes_in_draw_order) {
  ClearRenderPasses();

  for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) {
    RenderPassList::iterator to_take =
        render_passes_in_draw_order->begin() + i;
    render_passes_index_by_id_.insert(
        std::pair<RenderPassId, int>((*to_take)->id, i));
    scoped_ptr<RenderPass> taken_render_pass =
        render_passes_in_draw_order->take(to_take);
    render_passes_in_draw_order_.push_back(taken_render_pass.Pass());
  }

  // Give back an empty array instead of nulls.
  render_passes_in_draw_order->clear();

  // The render passes given here become part of the RenderSurfaceLayerList, so
  // changing them requires recomputing the RenderSurfaceLayerList.
  layer_tree_impl()->set_needs_update_draw_properties();
}

void DelegatedRendererLayerImpl::ClearRenderPasses() {
  render_passes_index_by_id_.clear();
  render_passes_in_draw_order_.clear();
}

scoped_ptr<LayerImpl> DelegatedRendererLayerImpl::CreateLayerImpl(
    LayerTreeImpl* tree_impl) {
  return DelegatedRendererLayerImpl::Create(tree_impl, id());
}

void DelegatedRendererLayerImpl::ReleaseResources() {
  ClearRenderPasses();
  ClearChildId();
}

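// Delegated RenderPasses are referred to by their position in draw order.
// Index 0 maps to id 1 and so on, keeping index 0 free for the RenderPass
// this compositor may generate for the layer's own RenderSurface (see
// AppendQuads, which merges the root delegated pass when the target pass has
// index 0).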
static inline int IndexToId(int index) { return index + 1; }
static inline int IdToIndex(int id) { return id - 1; }

RenderPassId DelegatedRendererLayerImpl::FirstContributingRenderPassId() const {
  return RenderPassId(id(), IndexToId(0));
}

RenderPassId DelegatedRendererLayerImpl::NextContributingRenderPassId(
    RenderPassId previous) const {
  return RenderPassId(previous.layer_id, previous.index + 1);
}

bool DelegatedRendererLayerImpl::ConvertDelegatedRenderPassId(
    RenderPassId delegated_render_pass_id,
    RenderPassId* output_render_pass_id) const {
  base::hash_map<RenderPassId, int>::const_iterator found =
      render_passes_index_by_id_.find(delegated_render_pass_id);
  if (found == render_passes_index_by_id_.end()) {
    // Be robust against a RenderPass id that isn't part of the frame.
    return false;
  }
  unsigned delegated_render_pass_index = found->second;
  *output_render_pass_id =
      RenderPassId(id(), IndexToId(delegated_render_pass_index));
  return true;
}

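// Appends every non-root delegated RenderPass to |render_pass_sink|, re-keyed
// with ids owned by this layer and with transform_to_root_target adjusted for
// this layer's position in the parent compositor's tree.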
void DelegatedRendererLayerImpl::AppendContributingRenderPasses(
    RenderPassSink* render_pass_sink) {
  DCHECK(HasContributingDelegatedRenderPasses());

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();
  gfx::Transform delegated_frame_to_root_transform = screen_space_transform();
  delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_,
                                          inverse_device_scale_factor_);

  for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) {
    RenderPassId output_render_pass_id(-1, -1);
    bool present =
        ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id,
                                     &output_render_pass_id);

    // Don't clash with the RenderPass we generate if we own a RenderSurface.
    DCHECK(present) << render_passes_in_draw_order_[i]->id.layer_id << ", "
                    << render_passes_in_draw_order_[i]->id.index;
    DCHECK_GT(output_render_pass_id.index, 0);

    scoped_ptr<RenderPass> copy_pass =
        render_passes_in_draw_order_[i]->Copy(output_render_pass_id);
    copy_pass->transform_to_root_target.ConcatTransform(
        delegated_frame_to_root_transform);
    render_pass_sink->AppendRenderPass(copy_pass.Pass());
  }
}

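// Delegated content is backed by resources from the child compositor, so it
// cannot be drawn in resourceless software mode.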
bool DelegatedRendererLayerImpl::WillDraw(DrawMode draw_mode,
                                          ResourceProvider* resource_provider) {
  if (draw_mode == DRAW_MODE_RESOURCELESS_SOFTWARE)
    return false;
  return LayerImpl::WillDraw(draw_mode, resource_provider);
}

void DelegatedRendererLayerImpl::AppendQuads(
    RenderPass* render_pass,
    AppendQuadsData* append_quads_data) {
  AppendRainbowDebugBorder(render_pass);

  // This list will be empty after a lost context until a new frame arrives.
  if (render_passes_in_draw_order_.empty())
    return;

  RenderPassId target_render_pass_id = render_pass->id;

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();

  DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin());
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();

  // If the index of the RenderPassId is 0, then it is a RenderPass generated
  // for a layer in this compositor, not the delegating renderer. In that case
  // we want to merge our root RenderPass with the target RenderPass.
  // Otherwise, it is some RenderPass which we added from the delegating
  // renderer.
  bool should_merge_root_render_pass_with_target = !target_render_pass_id.index;
  if (should_merge_root_render_pass_with_target) {
    // Verify that the RenderPass we are appending to is created by our
    // render_target.
    DCHECK(target_render_pass_id.layer_id == render_target()->id());

    AppendRenderPassQuads(render_pass,
                          root_delegated_render_pass,
                          frame_size);
  } else {
    // Verify that the RenderPass we are appending to was created by us.
    DCHECK(target_render_pass_id.layer_id == id());

    int render_pass_index = IdToIndex(target_render_pass_id.index);
    const RenderPass* delegated_render_pass =
        render_passes_in_draw_order_[render_pass_index];
    AppendRenderPassQuads(render_pass,
                          delegated_render_pass,
                          frame_size);
  }
}

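// When debug borders are enabled, tiles the layer's edges with translucent
// colored stripes so delegated layers are easy to identify visually.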
void DelegatedRendererLayerImpl::AppendRainbowDebugBorder(
    RenderPass* render_pass) {
  if (!ShowDebugBorders())
    return;

  SharedQuadState* shared_quad_state =
      render_pass->CreateAndAppendSharedQuadState();
  PopulateSharedQuadState(shared_quad_state);

  SkColor color;
  float border_width;
  GetDebugBorderProperties(&color, &border_width);

  SkColor colors[] = {
      0x80ff0000,  // Red.
      0x80ffa500,  // Orange.
      0x80ffff00,  // Yellow.
      0x80008000,  // Green.
      0x800000ff,  // Blue.
      0x80ee82ee,  // Violet.
  };
  const int kNumColors = arraysize(colors);

  const int kStripeWidth = 300;
  const int kStripeHeight = 300;

  for (size_t i = 0; ; ++i) {
    // For horizontal lines.
    int x = kStripeWidth * i;
    int width = std::min(kStripeWidth, content_bounds().width() - x - 1);

    // For vertical lines.
    int y = kStripeHeight * i;
    int height = std::min(kStripeHeight, content_bounds().height() - y - 1);

    gfx::Rect top(x, 0, width, border_width);
    gfx::Rect bottom(x,
                     content_bounds().height() - border_width,
                     width,
                     border_width);
    gfx::Rect left(0, y, border_width, height);
    gfx::Rect right(content_bounds().width() - border_width,
                    y,
                    border_width,
                    height);

    if (top.IsEmpty() && left.IsEmpty())
      break;

    if (!top.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* top_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      top_quad->SetNew(shared_quad_state, top, top, colors[i % kNumColors],
                       force_anti_aliasing_off);

      SolidColorDrawQuad* bottom_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      bottom_quad->SetNew(shared_quad_state, bottom, bottom,
                          colors[kNumColors - 1 - (i % kNumColors)],
                          force_anti_aliasing_off);

      if (contents_opaque()) {
        // Draws a stripe filling the layer vertically with the same color and
        // width as the horizontal stripes along the layer's top border.
        SolidColorDrawQuad* solid_quad =
            render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
        // The inner fill is more transparent than the border.
        static const float kFillOpacity = 0.1f;
        SkColor fill_color = SkColorSetA(
            colors[i % kNumColors],
            static_cast<uint8_t>(SkColorGetA(colors[i % kNumColors]) *
                                 kFillOpacity));
        gfx::Rect fill_rect(x, 0, width, content_bounds().height());
        solid_quad->SetNew(shared_quad_state, fill_rect, fill_rect, fill_color,
                           force_anti_aliasing_off);
      }
    }

    if (!left.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* left_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      left_quad->SetNew(shared_quad_state, left, left,
                        colors[kNumColors - 1 - (i % kNumColors)],
                        force_anti_aliasing_off);

      SolidColorDrawQuad* right_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      right_quad->SetNew(shared_quad_state, right, right,
                         colors[i % kNumColors], force_anti_aliasing_off);
    }
  }
}

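// Copies the quads of |delegated_render_pass| into |render_pass|. Shared quad
// states are duplicated and, for the root delegated pass, mapped into this
// layer's target space (transform, clip and opacity). RenderPass quads have
// their pass ids converted to the ids this layer advertises, and quads that
// are fully occluded are dropped.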
void DelegatedRendererLayerImpl::AppendRenderPassQuads(
    RenderPass* render_pass,
    const RenderPass* delegated_render_pass,
    const gfx::Size& frame_size) const {
  const SharedQuadState* delegated_shared_quad_state = nullptr;
  SharedQuadState* output_shared_quad_state = nullptr;

  gfx::Transform delegated_frame_to_target_transform = draw_transform();
  delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_,
                                            inverse_device_scale_factor_);
  bool is_root_delegated_render_pass =
      delegated_render_pass == render_passes_in_draw_order_.back();
  for (const auto& delegated_quad : delegated_render_pass->quad_list) {
    if (delegated_quad->shared_quad_state != delegated_shared_quad_state) {
      delegated_shared_quad_state = delegated_quad->shared_quad_state;
      output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState();
      output_shared_quad_state->CopyFrom(delegated_shared_quad_state);

      if (is_root_delegated_render_pass) {
        output_shared_quad_state->content_to_target_transform.ConcatTransform(
            delegated_frame_to_target_transform);

        if (render_target() == this) {
          DCHECK(!is_clipped());
          DCHECK(render_surface());
          DCHECK_EQ(0, num_unclipped_descendants());
          output_shared_quad_state->clip_rect =
              MathUtil::MapEnclosingClippedRect(
                  delegated_frame_to_target_transform,
                  output_shared_quad_state->clip_rect);
        } else {
          gfx::Rect clip_rect = drawable_content_rect();
          if (output_shared_quad_state->is_clipped) {
            clip_rect.Intersect(MathUtil::MapEnclosingClippedRect(
                delegated_frame_to_target_transform,
                output_shared_quad_state->clip_rect));
          }
          output_shared_quad_state->clip_rect = clip_rect;
          output_shared_quad_state->is_clipped = true;
        }

        output_shared_quad_state->opacity *= draw_opacity();
      }
    }
    DCHECK(output_shared_quad_state);

    gfx::Transform quad_content_to_delegated_target_space =
        output_shared_quad_state->content_to_target_transform;
    if (!is_root_delegated_render_pass) {
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_render_pass->transform_to_root_target);
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_frame_to_target_transform);
    }

    Occlusion occlusion_in_quad_space =
        draw_properties()
            .occlusion_in_content_space.GetOcclusionWithGivenDrawTransform(
                quad_content_to_delegated_target_space);

    gfx::Rect quad_visible_rect =
        occlusion_in_quad_space.GetUnoccludedContentRect(
            delegated_quad->visible_rect);

    if (quad_visible_rect.IsEmpty())
      continue;

    if (delegated_quad->material != DrawQuad::RENDER_PASS) {
      DrawQuad* output_quad = render_pass->CopyFromAndAppendDrawQuad(
          delegated_quad, output_shared_quad_state);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    } else {
      RenderPassId delegated_contributing_render_pass_id =
          RenderPassDrawQuad::MaterialCast(delegated_quad)->render_pass_id;
      RenderPassId output_contributing_render_pass_id(-1, -1);

      bool present =
          ConvertDelegatedRenderPassId(delegated_contributing_render_pass_id,
                                       &output_contributing_render_pass_id);
      // |present| being false means the child compositor sent an invalid
      // frame.
      DCHECK(present);
      DCHECK(output_contributing_render_pass_id != render_pass->id);

      RenderPassDrawQuad* output_quad =
          render_pass->CopyFromAndAppendRenderPassDrawQuad(
              RenderPassDrawQuad::MaterialCast(delegated_quad),
              output_shared_quad_state, output_contributing_render_pass_id);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    }
  }
}

const char* DelegatedRendererLayerImpl::LayerTypeAsString() const {
  return "cc::DelegatedRendererLayerImpl";
}

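// Drops this layer's reference to its ResourceProvider child id, destroying
// the child in the ResourceProvider if this layer created it.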
void DelegatedRendererLayerImpl::ClearChildId() {
  if (!child_id_)
    return;

  if (own_child_id_) {
    ResourceProvider* provider = layer_tree_impl()->resource_provider();
    provider->DestroyChild(child_id_);
  }

  resources_.clear();
  child_id_ = 0;
}

}  // namespace cc