// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/layers/delegated_renderer_layer_impl.h"

#include <algorithm>
#include <utility>

#include "base/bind.h"
#include "base/containers/hash_tables.h"
#include "cc/base/math_util.h"
#include "cc/layers/append_quads_data.h"
#include "cc/layers/render_pass_sink.h"
#include "cc/output/delegated_frame_data.h"
#include "cc/quads/render_pass_draw_quad.h"
#include "cc/quads/solid_color_draw_quad.h"
#include "cc/trees/layer_tree_impl.h"
#include "cc/trees/occlusion.h"
#include "ui/gfx/geometry/rect_conversions.h"

namespace cc {
DelegatedRendererLayerImpl::DelegatedRendererLayerImpl(LayerTreeImpl* tree_impl,
                                                       int id)
    : LayerImpl(tree_impl, id),
      have_render_passes_to_push_(false),
      inverse_device_scale_factor_(1.0f),
      child_id_(0),
      own_child_id_(false) {
}

DelegatedRendererLayerImpl::~DelegatedRendererLayerImpl() {
  ClearRenderPasses();
  ClearChildId();
}
bool DelegatedRendererLayerImpl::HasDelegatedContent() const { return true; }

bool DelegatedRendererLayerImpl::HasContributingDelegatedRenderPasses() const {
  // The root RenderPass for the layer is merged with its target
  // RenderPass in each frame. So we only have extra RenderPasses
  // to merge when we have a non-root RenderPass present.
  return render_passes_in_draw_order_.size() > 1;
}
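
// Maps a resource id from the child compositor's namespace to the id the
// parent ResourceProvider assigned for it, recording every id seen in
// |resources_in_frame|. If the child references an id the parent has never
// received, the whole frame is flagged invalid via |invalid_frame|.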
static ResourceId ResourceRemapHelper(
    bool* invalid_frame,
    const ResourceProvider::ResourceIdMap& child_to_parent_map,
    ResourceProvider::ResourceIdSet* resources_in_frame,
    ResourceId id) {
  ResourceProvider::ResourceIdMap::const_iterator it =
      child_to_parent_map.find(id);
  if (it == child_to_parent_map.end()) {
    *invalid_frame = true;
    return 0;
  }

  DCHECK_EQ(it->first, id);
  ResourceId remapped_id = it->second;
  resources_in_frame->insert(id);
  return remapped_id;
}
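
// Pushes this (pending) layer's state to its active-tree counterpart: the
// child id, the inverse device scale factor, the render passes (ownership is
// transferred) and a copy of the resource set.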
void DelegatedRendererLayerImpl::PushPropertiesTo(LayerImpl* layer) {
  LayerImpl::PushPropertiesTo(layer);

  DelegatedRendererLayerImpl* delegated_layer =
      static_cast<DelegatedRendererLayerImpl*>(layer);

  // If we have a new child_id to give to the active layer, it should
  // have already deleted its old child_id.
  DCHECK(delegated_layer->child_id_ == 0 ||
         delegated_layer->child_id_ == child_id_);
  delegated_layer->inverse_device_scale_factor_ = inverse_device_scale_factor_;
  delegated_layer->child_id_ = child_id_;
  delegated_layer->own_child_id_ = true;
  own_child_id_ = false;

  if (have_render_passes_to_push_) {
    DCHECK(child_id_);
    // This passes ownership of the render passes to the active tree.
    delegated_layer->SetRenderPasses(&render_passes_in_draw_order_);
    // Once resources are on the active tree, give them to the ResourceProvider
    // and release unused resources from the old frame.
    delegated_layer->TakeOwnershipOfResourcesIfOnActiveTree(resources_);
    DCHECK(render_passes_in_draw_order_.empty());
    have_render_passes_to_push_ = false;
  }

  // This is just a copy for testing, since resources are added to the
  // ResourceProvider in the pending tree.
  delegated_layer->resources_ = resources_;
}
void DelegatedRendererLayerImpl::CreateChildIdIfNeeded(
    const ReturnCallback& return_callback) {
  if (child_id_)
    return;

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  child_id_ = resource_provider->CreateChild(return_callback);
  own_child_id_ = true;
}
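
// Copies the child compositor's frame into this layer: receives the frame's
// resources, remaps their ids into the parent namespace, records the damage
// rect in layer space, and stores the render passes until they are pushed to
// the active tree.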
void DelegatedRendererLayerImpl::SetFrameData(
    const DelegatedFrameData* frame_data,
    const gfx::RectF& damage_in_frame) {
  DCHECK(child_id_) << "CreateChildIdIfNeeded must be called first.";
  DCHECK(frame_data);
  DCHECK(!frame_data->render_pass_list.empty());
  // A frame with an empty root render pass is invalid.
  DCHECK(!frame_data->render_pass_list.back()->output_rect.IsEmpty());

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  const ResourceProvider::ResourceIdMap& resource_map =
      resource_provider->GetChildToParentMap(child_id_);

  resource_provider->ReceiveFromChild(child_id_, frame_data->resource_list);

  RenderPassList render_pass_list;
  RenderPass::CopyAll(frame_data->render_pass_list, &render_pass_list);

  bool invalid_frame = false;
  ResourceProvider::ResourceIdSet resources_in_frame;
  size_t reserve_size = frame_data->resource_list.size();
#if defined(COMPILER_MSVC)
  resources_in_frame.reserve(reserve_size);
#elif defined(COMPILER_GCC)
  // Pre-standard hash-tables only implement resize, which behaves similarly
  // to reserve for these keys. Resizing to 0 may also be broken (particularly
  // on stlport).
  // TODO(jbauman): Replace with reserve when C++11 is supported everywhere.
  if (reserve_size)
    resources_in_frame.resize(reserve_size);
#endif
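  // Walk every quad in the copied passes and rewrite its resource ids into
  // the parent namespace; a reference to an unknown id invalidates the frame.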
  DrawQuad::ResourceIteratorCallback remap_resources_to_parent_callback =
      base::Bind(&ResourceRemapHelper,
                 &invalid_frame,
                 resource_map,
                 &resources_in_frame);
  for (const auto& pass : render_pass_list) {
    for (const auto& quad : pass->quad_list)
      quad->IterateResources(remap_resources_to_parent_callback);
  }

  if (invalid_frame) {
    // Declare that we are still using the last frame's resources. This drops
    // ownership of the invalid frame's resources, keeping only those still in
    // use by the active tree.
    resource_provider->DeclareUsedResourcesFromChild(child_id_, resources_);
    return;
  }

  // Save the new frame's resources, but don't give them to the
  // ResourceProvider until they are active, since the resources on the active
  // tree will still be used and we don't want to return them early.
  resources_.swap(resources_in_frame);
  TakeOwnershipOfResourcesIfOnActiveTree(resources_);

  inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor;
  // Display size is already set, so we can compute what the damage rect will
  // be in layer space. The damage may exceed the visible portion of the
  // frame, so intersect the damage with the layer's bounds.
  RenderPass* new_root_pass = render_pass_list.back();
  gfx::Size frame_size = new_root_pass->output_rect.size();
  gfx::RectF damage_in_layer = damage_in_frame;
  damage_in_layer.Scale(inverse_device_scale_factor_);
  SetUpdateRect(gfx::IntersectRects(
      gfx::UnionRects(update_rect(), gfx::ToEnclosingRect(damage_in_layer)),
      gfx::Rect(bounds())));

  SetRenderPasses(&render_pass_list);
  have_render_passes_to_push_ = true;
}
void DelegatedRendererLayerImpl::TakeOwnershipOfResourcesIfOnActiveTree(
    const ResourceProvider::ResourceIdSet& resources) {
  DCHECK(child_id_);
  if (!layer_tree_impl()->IsActiveTree())
    return;
  layer_tree_impl()->resource_provider()->DeclareUsedResourcesFromChild(
      child_id_, resources);
}
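
// Takes ownership of |render_passes_in_draw_order| and rebuilds the id->index
// map used to translate delegated RenderPass ids into this layer's own ids.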
void DelegatedRendererLayerImpl::SetRenderPasses(
    RenderPassList* render_passes_in_draw_order) {
  ClearRenderPasses();

  for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) {
    RenderPassList::iterator to_take =
        render_passes_in_draw_order->begin() + i;
    render_passes_index_by_id_.insert(
        std::pair<RenderPassId, int>((*to_take)->id, i));
    scoped_ptr<RenderPass> taken_render_pass =
        render_passes_in_draw_order->take(to_take);
    render_passes_in_draw_order_.push_back(taken_render_pass.Pass());
  }

  // Give back an empty array instead of nulls.
  render_passes_in_draw_order->clear();

  // The render passes given here become part of the RenderSurfaceLayerList, so
  // changing them requires recomputing the RenderSurfaceLayerList.
  layer_tree_impl()->set_needs_update_draw_properties();
}
void DelegatedRendererLayerImpl::ClearRenderPasses() {
  render_passes_index_by_id_.clear();
  render_passes_in_draw_order_.clear();
}

scoped_ptr<LayerImpl> DelegatedRendererLayerImpl::CreateLayerImpl(
    LayerTreeImpl* tree_impl) {
  return DelegatedRendererLayerImpl::Create(tree_impl, id());
}

void DelegatedRendererLayerImpl::ReleaseResources() {
  ClearRenderPasses();
  ClearChildId();
  have_render_passes_to_push_ = false;
}
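
// Contributing render passes are published with 1-based indices so that
// index 0 stays reserved for the RenderPass generated by this layer's own
// RenderSurface (see AppendQuads).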
static inline int IndexToId(int index) { return index + 1; }
static inline int IdToIndex(int id) { return id - 1; }

RenderPassId DelegatedRendererLayerImpl::FirstContributingRenderPassId() const {
  return RenderPassId(id(), IndexToId(0));
}

RenderPassId DelegatedRendererLayerImpl::NextContributingRenderPassId(
    RenderPassId previous) const {
  return RenderPassId(previous.layer_id, previous.index + 1);
}

bool DelegatedRendererLayerImpl::ConvertDelegatedRenderPassId(
    RenderPassId delegated_render_pass_id,
    RenderPassId* output_render_pass_id) const {
  base::hash_map<RenderPassId, int>::const_iterator found =
      render_passes_index_by_id_.find(delegated_render_pass_id);
  if (found == render_passes_index_by_id_.end()) {
    // Be robust against a RenderPass id that isn't part of the frame.
    return false;
  }
  unsigned delegated_render_pass_index = found->second;
  *output_render_pass_id =
      RenderPassId(id(), IndexToId(delegated_render_pass_index));
  return true;
}
void DelegatedRendererLayerImpl::AppendContributingRenderPasses(
    RenderPassSink* render_pass_sink) {
  DCHECK(HasContributingDelegatedRenderPasses());

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();
  gfx::Transform delegated_frame_to_root_transform = screen_space_transform();
  delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_,
                                          inverse_device_scale_factor_);

  for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) {
    RenderPassId output_render_pass_id(-1, -1);
    bool present =
        ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id,
                                     &output_render_pass_id);

    // Don't clash with the RenderPass we generate if we own a RenderSurface.
    DCHECK(present) << render_passes_in_draw_order_[i]->id.layer_id << ", "
                    << render_passes_in_draw_order_[i]->id.index;
    DCHECK_GT(output_render_pass_id.index, 0);

    scoped_ptr<RenderPass> copy_pass =
        render_passes_in_draw_order_[i]->Copy(output_render_pass_id);
    copy_pass->transform_to_root_target.ConcatTransform(
        delegated_frame_to_root_transform);
    render_pass_sink->AppendRenderPass(copy_pass.Pass());
  }
}
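
// Delegated content needs real resources to draw, so skip drawing entirely in
// resourceless software mode.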
bool DelegatedRendererLayerImpl::WillDraw(DrawMode draw_mode,
                                          ResourceProvider* resource_provider) {
  if (draw_mode == DRAW_MODE_RESOURCELESS_SOFTWARE)
    return false;
  return LayerImpl::WillDraw(draw_mode, resource_provider);
}
void DelegatedRendererLayerImpl::AppendQuads(
    RenderPass* render_pass,
    AppendQuadsData* append_quads_data) {
  AppendRainbowDebugBorder(render_pass);

  // This list will be empty after a lost context until a new frame arrives.
  if (render_passes_in_draw_order_.empty())
    return;

  RenderPassId target_render_pass_id = render_pass->id;

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();

  DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin());
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();

  // If the index of the RenderPassId is 0, then it is a RenderPass generated
  // for a layer in this compositor, not the delegating renderer. In that case
  // we want to merge our root RenderPass with the target RenderPass.
  // Otherwise, it is some RenderPass which we added from the delegating
  // renderer.
  bool should_merge_root_render_pass_with_target =
      !target_render_pass_id.index;
  if (should_merge_root_render_pass_with_target) {
    // Verify that the RenderPass we are appending to was created by our
    // render_target.
    DCHECK(target_render_pass_id.layer_id == render_target()->id());

    AppendRenderPassQuads(render_pass,
                          root_delegated_render_pass,
                          frame_size);
  } else {
    // Verify that the RenderPass we are appending to was created by us.
    DCHECK(target_render_pass_id.layer_id == id());

    int render_pass_index = IdToIndex(target_render_pass_id.index);
    const RenderPass* delegated_render_pass =
        render_passes_in_draw_order_[render_pass_index];
    AppendRenderPassQuads(render_pass,
                          delegated_render_pass,
                          frame_size);
  }
}
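
// When debug borders are enabled, paints translucent 300x300 color stripes
// along the layer's edges (plus faint vertical fills when the contents are
// opaque) so delegated content is easy to pick out on screen.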
void DelegatedRendererLayerImpl::AppendRainbowDebugBorder(
    RenderPass* render_pass) {
  if (!ShowDebugBorders())
    return;

  SharedQuadState* shared_quad_state =
      render_pass->CreateAndAppendSharedQuadState();
  PopulateSharedQuadState(shared_quad_state);

  SkColor color;
  float border_width;
  GetDebugBorderProperties(&color, &border_width);

  SkColor colors[] = {
      0x80ff0000,  // Red.
      0x80ffa500,  // Orange.
      0x80ffff00,  // Yellow.
      0x80008000,  // Green.
      0x800000ff,  // Blue.
      0x80ee82ee,  // Violet.
  };
  const int kNumColors = arraysize(colors);

  const int kStripeWidth = 300;
  const int kStripeHeight = 300;

  for (size_t i = 0; ; ++i) {
    // For horizontal lines.
    int x = kStripeWidth * i;
    int width = std::min(kStripeWidth, content_bounds().width() - x - 1);

    // For vertical lines.
    int y = kStripeHeight * i;
    int height = std::min(kStripeHeight, content_bounds().height() - y - 1);

    gfx::Rect top(x, 0, width, border_width);
    gfx::Rect bottom(x,
                     content_bounds().height() - border_width,
                     width,
                     border_width);
    gfx::Rect left(0, y, border_width, height);
    gfx::Rect right(content_bounds().width() - border_width,
                    y,
                    border_width,
                    height);

    if (top.IsEmpty() && left.IsEmpty())
      break;

    if (!top.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* top_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      top_quad->SetNew(shared_quad_state, top, top, colors[i % kNumColors],
                       force_anti_aliasing_off);

      SolidColorDrawQuad* bottom_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      bottom_quad->SetNew(shared_quad_state, bottom, bottom,
                          colors[kNumColors - 1 - (i % kNumColors)],
                          force_anti_aliasing_off);

      if (contents_opaque()) {
        // Draws a stripe filling the layer vertically with the same color and
        // width as the horizontal stripes along the layer's top border.
        SolidColorDrawQuad* solid_quad =
            render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
        // The inner fill is more transparent than the border.
        static const float kFillOpacity = 0.1f;
        SkColor fill_color = SkColorSetA(
            colors[i % kNumColors],
            static_cast<uint8_t>(SkColorGetA(colors[i % kNumColors]) *
                                 kFillOpacity));
        gfx::Rect fill_rect(x, 0, width, content_bounds().height());
        solid_quad->SetNew(shared_quad_state, fill_rect, fill_rect, fill_color,
                           force_anti_aliasing_off);
      }
    }

    if (!left.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* left_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      left_quad->SetNew(shared_quad_state, left, left,
                        colors[kNumColors - 1 - (i % kNumColors)],
                        force_anti_aliasing_off);

      SolidColorDrawQuad* right_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      right_quad->SetNew(shared_quad_state, right, right,
                         colors[i % kNumColors], force_anti_aliasing_off);
    }
  }
}
void DelegatedRendererLayerImpl::AppendRenderPassQuads(
    RenderPass* render_pass,
    const RenderPass* delegated_render_pass,
    const gfx::Size& frame_size) const {
  const SharedQuadState* delegated_shared_quad_state = nullptr;
  SharedQuadState* output_shared_quad_state = nullptr;

  gfx::Transform delegated_frame_to_target_transform = draw_transform();
  delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_,
                                            inverse_device_scale_factor_);
  bool is_root_delegated_render_pass =
      delegated_render_pass == render_passes_in_draw_order_.back();
  for (const auto& delegated_quad : delegated_render_pass->quad_list) {
    if (delegated_quad->shared_quad_state != delegated_shared_quad_state) {
      delegated_shared_quad_state = delegated_quad->shared_quad_state;
      output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState();
      output_shared_quad_state->CopyFrom(delegated_shared_quad_state);

      if (is_root_delegated_render_pass) {
        output_shared_quad_state->content_to_target_transform.ConcatTransform(
            delegated_frame_to_target_transform);

        if (render_target() == this) {
          DCHECK(!is_clipped());
          DCHECK(render_surface());
          DCHECK_EQ(0, num_unclipped_descendants());
          output_shared_quad_state->clip_rect =
              MathUtil::MapEnclosingClippedRect(
                  delegated_frame_to_target_transform,
                  output_shared_quad_state->clip_rect);
        } else {
          gfx::Rect clip_rect = drawable_content_rect();
          if (output_shared_quad_state->is_clipped) {
            clip_rect.Intersect(MathUtil::MapEnclosingClippedRect(
                delegated_frame_to_target_transform,
                output_shared_quad_state->clip_rect));
          }
          output_shared_quad_state->clip_rect = clip_rect;
          output_shared_quad_state->is_clipped = true;
        }

        output_shared_quad_state->opacity *= draw_opacity();
      }
    }
    DCHECK(output_shared_quad_state);

    gfx::Transform quad_content_to_delegated_target_space =
        output_shared_quad_state->content_to_target_transform;
    if (!is_root_delegated_render_pass) {
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_render_pass->transform_to_root_target);
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_frame_to_target_transform);
    }

    Occlusion occlusion_in_quad_space =
        draw_properties()
            .occlusion_in_content_space.GetOcclusionWithGivenDrawTransform(
                quad_content_to_delegated_target_space);

    gfx::Rect quad_visible_rect =
        occlusion_in_quad_space.GetUnoccludedContentRect(
            delegated_quad->visible_rect);

    if (quad_visible_rect.IsEmpty())
      continue;

    if (delegated_quad->material != DrawQuad::RENDER_PASS) {
      DrawQuad* output_quad = render_pass->CopyFromAndAppendDrawQuad(
          delegated_quad, output_shared_quad_state);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    } else {
      RenderPassId delegated_contributing_render_pass_id =
          RenderPassDrawQuad::MaterialCast(delegated_quad)->render_pass_id;
      RenderPassId output_contributing_render_pass_id(-1, -1);

      bool present =
          ConvertDelegatedRenderPassId(delegated_contributing_render_pass_id,
                                       &output_contributing_render_pass_id);
      // |present| being false means the child compositor sent an invalid
      // frame.
      DCHECK(present);
      DCHECK(output_contributing_render_pass_id != render_pass->id);

      RenderPassDrawQuad* output_quad =
          render_pass->CopyFromAndAppendRenderPassDrawQuad(
              RenderPassDrawQuad::MaterialCast(delegated_quad),
              output_shared_quad_state, output_contributing_render_pass_id);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    }
  }
}
const char* DelegatedRendererLayerImpl::LayerTypeAsString() const {
  return "cc::DelegatedRendererLayerImpl";
}

void DelegatedRendererLayerImpl::ClearChildId() {
  if (!child_id_)
    return;

  if (own_child_id_) {
    ResourceProvider* provider = layer_tree_impl()->resource_provider();
    provider->DestroyChild(child_id_);
  }

  resources_.clear();
  child_id_ = 0;
}

}  // namespace cc