cc/layers/delegated_renderer_layer_impl.cc
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/layers/delegated_renderer_layer_impl.h"

#include <algorithm>
#include <utility>

#include "base/bind.h"
#include "base/containers/hash_tables.h"
#include "cc/base/math_util.h"
#include "cc/layers/append_quads_data.h"
#include "cc/layers/render_pass_sink.h"
#include "cc/output/delegated_frame_data.h"
#include "cc/quads/render_pass_draw_quad.h"
#include "cc/quads/solid_color_draw_quad.h"
#include "cc/trees/layer_tree_impl.h"
#include "cc/trees/occlusion.h"
#include "ui/gfx/geometry/rect_conversions.h"

namespace cc {
DelegatedRendererLayerImpl::DelegatedRendererLayerImpl(LayerTreeImpl* tree_impl,
                                                       int id)
    : LayerImpl(tree_impl, id),
      have_render_passes_to_push_(false),
      inverse_device_scale_factor_(1.0f),
      child_id_(0),
      own_child_id_(false) {
}
DelegatedRendererLayerImpl::~DelegatedRendererLayerImpl() {
  ClearRenderPasses();
  ClearChildId();
}
bool DelegatedRendererLayerImpl::HasDelegatedContent() const { return true; }
bool DelegatedRendererLayerImpl::HasContributingDelegatedRenderPasses() const {
  // The root RenderPass for the layer is merged with its target
  // RenderPass in each frame. So we only have extra RenderPasses
  // to merge when we have a non-root RenderPass present.
  return render_passes_in_draw_order_.size() > 1;
}
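
// Pushes the delegated state (child id, inverse device scale factor, render
// passes, and resources) from this pending-tree layer to its active-tree
// counterpart.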
void DelegatedRendererLayerImpl::PushPropertiesTo(LayerImpl* layer) {
  LayerImpl::PushPropertiesTo(layer);

  DelegatedRendererLayerImpl* delegated_layer =
      static_cast<DelegatedRendererLayerImpl*>(layer);

  // If we have a new child_id to give to the active layer, it should
  // have already deleted its old child_id.
  DCHECK(delegated_layer->child_id_ == 0 ||
         delegated_layer->child_id_ == child_id_);
  delegated_layer->inverse_device_scale_factor_ = inverse_device_scale_factor_;
  delegated_layer->child_id_ = child_id_;
  delegated_layer->own_child_id_ = true;
  own_child_id_ = false;

  if (have_render_passes_to_push_) {
    DCHECK(child_id_);
    // This passes ownership of the render passes to the active tree.
    delegated_layer->SetRenderPasses(&render_passes_in_draw_order_);
    // Once resources are on the active tree, give them to the ResourceProvider
    // and release unused resources from the old frame.
    delegated_layer->TakeOwnershipOfResourcesIfOnActiveTree(resources_);
    DCHECK(render_passes_in_draw_order_.empty());
    have_render_passes_to_push_ = false;
  }

  // This is just a copy for testing, since resources are added to the
  // ResourceProvider in the pending tree.
  delegated_layer->resources_ = resources_;
}
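
// Lazily registers this layer as a child of the ResourceProvider; the child id
// is what lets resources from the delegating renderer be received and later
// returned through |return_callback|.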
void DelegatedRendererLayerImpl::CreateChildIdIfNeeded(
    const ReturnCallback& return_callback) {
  if (child_id_)
    return;

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  child_id_ = resource_provider->CreateChild(return_callback);
  own_child_id_ = true;
}
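
// Receives a delegated frame from the child compositor: resources are handed
// to the ResourceProvider and remapped from child ids to parent ids, the
// frame's render passes are copied and stored in draw order, and the frame's
// damage is accumulated into the layer's update rect.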
void DelegatedRendererLayerImpl::SetFrameData(
    const DelegatedFrameData* frame_data,
    const gfx::RectF& damage_in_frame) {
  DCHECK(child_id_) << "CreateChildIdIfNeeded must be called first.";
  DCHECK(frame_data);
  DCHECK(!frame_data->render_pass_list.empty());
  // A frame with an empty root render pass is invalid.
  DCHECK(!frame_data->render_pass_list.back()->output_rect.IsEmpty());

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  const ResourceProvider::ResourceIdMap& resource_map =
      resource_provider->GetChildToParentMap(child_id_);

  resource_provider->ReceiveFromChild(child_id_, frame_data->resource_list);

  RenderPassList render_pass_list;
  RenderPass::CopyAll(frame_data->render_pass_list, &render_pass_list);

  bool invalid_frame = false;
  ResourceProvider::ResourceIdSet resources_in_frame;
  size_t reserve_size = frame_data->resource_list.size();
#if defined(COMPILER_MSVC)
  resources_in_frame.reserve(reserve_size);
#elif defined(COMPILER_GCC)
  // Pre-standard hash-tables only implement resize, which behaves similarly
  // to reserve for these keys. Resizing to 0 may also be broken (particularly
  // on stlport).
  // TODO(jbauman): Replace with reserve when C++11 is supported everywhere.
  if (reserve_size)
    resources_in_frame.resize(reserve_size);
#endif
  for (const auto& pass : render_pass_list) {
    for (const auto& quad : pass->quad_list) {
      for (ResourceId& resource_id : quad->resources) {
        ResourceProvider::ResourceIdMap::const_iterator it =
            resource_map.find(resource_id);
        if (it == resource_map.end()) {
          invalid_frame = true;
          break;
        }

        DCHECK_EQ(it->first, resource_id);
        ResourceId remapped_id = it->second;
        resources_in_frame.insert(resource_id);
        resource_id = remapped_id;
      }
    }
  }

  if (invalid_frame) {
    // Declare we are still using the last frame's resources. Drops ownership
    // of any invalid resources, keeping only those in use by the active tree.
    resource_provider->DeclareUsedResourcesFromChild(child_id_, resources_);
    return;
  }

  // Save the new frame's resources, but don't give them to the
  // ResourceProvider until they are active, since the resources on the active
  // tree will still be used and we don't want to return them early.
  resources_.swap(resources_in_frame);
  TakeOwnershipOfResourcesIfOnActiveTree(resources_);

  inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor;
  // Display size is already set so we can compute what the damage rect
  // will be in layer space. The damage may exceed the visible portion of
  // the frame, so intersect the damage to the layer's bounds.
  RenderPass* new_root_pass = render_pass_list.back();
  gfx::Size frame_size = new_root_pass->output_rect.size();
  gfx::RectF damage_in_layer = damage_in_frame;
  damage_in_layer.Scale(inverse_device_scale_factor_);
  SetUpdateRect(gfx::IntersectRects(
      gfx::UnionRects(update_rect(), gfx::ToEnclosingRect(damage_in_layer)),
      gfx::Rect(bounds())));

  SetRenderPasses(&render_pass_list);
  have_render_passes_to_push_ = true;
}
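
// Declares to the ResourceProvider which child resources are still in use.
// Only done on the active tree; resources pushed with a pending tree are
// declared when that tree activates (see PushPropertiesTo).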
void DelegatedRendererLayerImpl::TakeOwnershipOfResourcesIfOnActiveTree(
    const ResourceProvider::ResourceIdSet& resources) {
  DCHECK(child_id_);
  if (!layer_tree_impl()->IsActiveTree())
    return;
  layer_tree_impl()->resource_provider()->DeclareUsedResourcesFromChild(
      child_id_, resources);
}
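
// Takes ownership of |render_passes_in_draw_order| and rebuilds the
// id-to-index map used to look up delegated RenderPasses.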
void DelegatedRendererLayerImpl::SetRenderPasses(
    RenderPassList* render_passes_in_draw_order) {
  ClearRenderPasses();

  for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) {
    RenderPassList::iterator to_take =
        render_passes_in_draw_order->begin() + i;
    render_passes_index_by_id_.insert(
        RenderPassToIndexMap::value_type((*to_take)->id, i));
    scoped_ptr<RenderPass> taken_render_pass =
        render_passes_in_draw_order->take(to_take);
    render_passes_in_draw_order_.push_back(taken_render_pass.Pass());
  }

  // Give back an empty array instead of nulls.
  render_passes_in_draw_order->clear();

  // The render passes given here become part of the RenderSurfaceLayerList, so
  // changing them requires recomputing the RenderSurfaceLayerList.
  layer_tree_impl()->set_needs_update_draw_properties();
}
void DelegatedRendererLayerImpl::ClearRenderPasses() {
  render_passes_index_by_id_.clear();
  render_passes_in_draw_order_.clear();
}
scoped_ptr<LayerImpl> DelegatedRendererLayerImpl::CreateLayerImpl(
    LayerTreeImpl* tree_impl) {
  return DelegatedRendererLayerImpl::Create(tree_impl, id());
}
void DelegatedRendererLayerImpl::ReleaseResources() {
  ClearRenderPasses();
  ClearChildId();
  have_render_passes_to_push_ = false;
}
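
// Delegated RenderPass indices map to RenderPassId indices shifted by one:
// index 0 is reserved for RenderPasses generated by this compositor for the
// layer itself (see AppendQuads).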
static inline size_t IndexToId(size_t index) {
  return index + 1;
}

static inline size_t IdToIndex(size_t id) {
  DCHECK_GT(id, 0u);
  return id - 1;
}
RenderPassId DelegatedRendererLayerImpl::FirstContributingRenderPassId() const {
  return RenderPassId(id(), IndexToId(0));
}

RenderPassId DelegatedRendererLayerImpl::NextContributingRenderPassId(
    RenderPassId previous) const {
  return RenderPassId(previous.layer_id, previous.index + 1);
}
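
// Translates a RenderPass id from the delegated frame into the id this layer
// uses for the corresponding contributing RenderPass. Returns false if the id
// is not part of the current frame.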
bool DelegatedRendererLayerImpl::ConvertDelegatedRenderPassId(
    RenderPassId delegated_render_pass_id,
    RenderPassId* output_render_pass_id) const {
  RenderPassToIndexMap::const_iterator found =
      render_passes_index_by_id_.find(delegated_render_pass_id);
  if (found == render_passes_index_by_id_.end()) {
    // Be robust against a RenderPass id that isn't part of the frame.
    return false;
  }
  size_t delegated_render_pass_index = found->second;
  *output_render_pass_id =
      RenderPassId(id(), IndexToId(delegated_render_pass_index));
  return true;
}
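
// Copies every non-root delegated RenderPass into the frame being built,
// renaming its id into this layer's id space and appending the transform from
// the delegated frame to the root target.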
void DelegatedRendererLayerImpl::AppendContributingRenderPasses(
    RenderPassSink* render_pass_sink) {
  DCHECK(HasContributingDelegatedRenderPasses());

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();
  gfx::Transform delegated_frame_to_root_transform = screen_space_transform();
  delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_,
                                          inverse_device_scale_factor_);

  for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) {
    RenderPassId output_render_pass_id;
    bool present =
        ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id,
                                     &output_render_pass_id);

    // Don't clash with the RenderPass we generate if we own a RenderSurface.
    DCHECK(present) << render_passes_in_draw_order_[i]->id.layer_id << ", "
                    << render_passes_in_draw_order_[i]->id.index;
    DCHECK_GT(output_render_pass_id.index, 0u);

    scoped_ptr<RenderPass> copy_pass =
        render_passes_in_draw_order_[i]->Copy(output_render_pass_id);
    copy_pass->transform_to_root_target.ConcatTransform(
        delegated_frame_to_root_transform);
    render_pass_sink->AppendRenderPass(copy_pass.Pass());
  }
}
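
// Delegated frames carry no software-drawable content, so drawing is skipped
// entirely in resourceless software mode.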
bool DelegatedRendererLayerImpl::WillDraw(DrawMode draw_mode,
                                          ResourceProvider* resource_provider) {
  if (draw_mode == DRAW_MODE_RESOURCELESS_SOFTWARE)
    return false;
  return LayerImpl::WillDraw(draw_mode, resource_provider);
}
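
// Either merges the root delegated RenderPass's quads into the target
// RenderPass (when the target belongs to our render target), or appends the
// quads of the contributing RenderPass that |render_pass| was created for.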
void DelegatedRendererLayerImpl::AppendQuads(
    RenderPass* render_pass,
    AppendQuadsData* append_quads_data) {
  AppendRainbowDebugBorder(render_pass);

  // This list will be empty after a lost context until a new frame arrives.
  if (render_passes_in_draw_order_.empty())
    return;

  RenderPassId target_render_pass_id = render_pass->id;

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();

  DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin());
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();

  // If the index of the RenderPassId is 0, then it is a RenderPass generated
  // for a layer in this compositor, not the delegating renderer. Then we want
  // to merge our root RenderPass with the target RenderPass. Otherwise, it is
  // some RenderPass which we added from the delegating renderer.
  bool should_merge_root_render_pass_with_target =
      !target_render_pass_id.index;
  if (should_merge_root_render_pass_with_target) {
    // Verify that the RenderPass we are appending to is created by our
    // render_target.
    DCHECK(target_render_pass_id.layer_id == render_target()->id());

    AppendRenderPassQuads(render_pass,
                          root_delegated_render_pass,
                          frame_size);
  } else {
    // Verify that the RenderPass we are appending to was created by us.
    DCHECK(target_render_pass_id.layer_id == id());

    size_t render_pass_index = IdToIndex(target_render_pass_id.index);
    const RenderPass* delegated_render_pass =
        render_passes_in_draw_order_[render_pass_index];
    AppendRenderPassQuads(render_pass,
                          delegated_render_pass,
                          frame_size);
  }
}
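
// Debug-only visualization: tiles the layer's edges with translucent rainbow
// stripes (plus a faint interior fill when the layer is opaque) so delegated
// content is easy to spot.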
void DelegatedRendererLayerImpl::AppendRainbowDebugBorder(
    RenderPass* render_pass) {
  if (!ShowDebugBorders())
    return;

  SharedQuadState* shared_quad_state =
      render_pass->CreateAndAppendSharedQuadState();
  PopulateSharedQuadState(shared_quad_state);

  SkColor color;
  float border_width;
  GetDebugBorderProperties(&color, &border_width);

  SkColor colors[] = {
      0x80ff0000,  // Red.
      0x80ffa500,  // Orange.
      0x80ffff00,  // Yellow.
      0x80008000,  // Green.
      0x800000ff,  // Blue.
      0x80ee82ee,  // Violet.
  };
  const int kNumColors = arraysize(colors);

  const int kStripeWidth = 300;
  const int kStripeHeight = 300;

  for (int i = 0;; ++i) {
    // For horizontal lines.
    int x = kStripeWidth * i;
    int width = std::min(kStripeWidth, bounds().width() - x - 1);

    // For vertical lines.
    int y = kStripeHeight * i;
    int height = std::min(kStripeHeight, bounds().height() - y - 1);

    gfx::Rect top(x, 0, width, border_width);
    gfx::Rect bottom(x, bounds().height() - border_width, width, border_width);
    gfx::Rect left(0, y, border_width, height);
    gfx::Rect right(bounds().width() - border_width, y, border_width, height);

    if (top.IsEmpty() && left.IsEmpty())
      break;

    if (!top.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* top_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      top_quad->SetNew(shared_quad_state, top, top, colors[i % kNumColors],
                       force_anti_aliasing_off);

      SolidColorDrawQuad* bottom_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      bottom_quad->SetNew(shared_quad_state, bottom, bottom,
                          colors[kNumColors - 1 - (i % kNumColors)],
                          force_anti_aliasing_off);

      if (contents_opaque()) {
        // Draws a stripe filling the layer vertically with the same color and
        // width as the horizontal stripes along the layer's top border.
        SolidColorDrawQuad* solid_quad =
            render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
        // The inner fill is more transparent than the border.
        static const float kFillOpacity = 0.1f;
        SkColor fill_color = SkColorSetA(
            colors[i % kNumColors],
            static_cast<uint8_t>(SkColorGetA(colors[i % kNumColors]) *
                                 kFillOpacity));
        gfx::Rect fill_rect(x, 0, width, bounds().height());
        solid_quad->SetNew(shared_quad_state, fill_rect, fill_rect, fill_color,
                           force_anti_aliasing_off);
      }
    }
    if (!left.IsEmpty()) {
      bool force_anti_aliasing_off = false;
      SolidColorDrawQuad* left_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      left_quad->SetNew(shared_quad_state, left, left,
                        colors[kNumColors - 1 - (i % kNumColors)],
                        force_anti_aliasing_off);

      SolidColorDrawQuad* right_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      right_quad->SetNew(shared_quad_state, right, right,
                         colors[i % kNumColors], force_anti_aliasing_off);
    }
  }
}
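
// Copies the quads of one delegated RenderPass into |render_pass|. Shared quad
// states are cloned and, for the root delegated pass, mapped from frame space
// into target space; fully occluded quads are dropped, and RenderPassDrawQuads
// are retargeted to this layer's RenderPass ids.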
void DelegatedRendererLayerImpl::AppendRenderPassQuads(
    RenderPass* render_pass,
    const RenderPass* delegated_render_pass,
    const gfx::Size& frame_size) const {
  const SharedQuadState* delegated_shared_quad_state = nullptr;
  SharedQuadState* output_shared_quad_state = nullptr;

  gfx::Transform delegated_frame_to_target_transform = draw_transform();
  delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_,
                                            inverse_device_scale_factor_);
  bool is_root_delegated_render_pass =
      delegated_render_pass == render_passes_in_draw_order_.back();
  for (const auto& delegated_quad : delegated_render_pass->quad_list) {
    if (delegated_quad->shared_quad_state != delegated_shared_quad_state) {
      delegated_shared_quad_state = delegated_quad->shared_quad_state;
      output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState();
      output_shared_quad_state->CopyFrom(delegated_shared_quad_state);

      if (is_root_delegated_render_pass) {
        output_shared_quad_state->quad_to_target_transform.ConcatTransform(
            delegated_frame_to_target_transform);

        if (render_target() == this) {
          DCHECK(!is_clipped());
          DCHECK(render_surface());
          DCHECK_EQ(0u, num_unclipped_descendants());
          output_shared_quad_state->clip_rect =
              MathUtil::MapEnclosingClippedRect(
                  delegated_frame_to_target_transform,
                  output_shared_quad_state->clip_rect);
        } else {
          gfx::Rect clip_rect = drawable_content_rect();
          if (output_shared_quad_state->is_clipped) {
            clip_rect.Intersect(MathUtil::MapEnclosingClippedRect(
                delegated_frame_to_target_transform,
                output_shared_quad_state->clip_rect));
          }
          output_shared_quad_state->clip_rect = clip_rect;
          output_shared_quad_state->is_clipped = true;
        }

        output_shared_quad_state->opacity *= draw_opacity();
      }
    }
    DCHECK(output_shared_quad_state);

    gfx::Transform quad_content_to_delegated_target_space =
        output_shared_quad_state->quad_to_target_transform;
    if (!is_root_delegated_render_pass) {
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_render_pass->transform_to_root_target);
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_frame_to_target_transform);
    }

    Occlusion occlusion_in_quad_space =
        draw_properties()
            .occlusion_in_content_space.GetOcclusionWithGivenDrawTransform(
                quad_content_to_delegated_target_space);

    gfx::Rect quad_visible_rect =
        occlusion_in_quad_space.GetUnoccludedContentRect(
            delegated_quad->visible_rect);

    if (quad_visible_rect.IsEmpty())
      continue;

    if (delegated_quad->material != DrawQuad::RENDER_PASS) {
      DrawQuad* output_quad = render_pass->CopyFromAndAppendDrawQuad(
          delegated_quad, output_shared_quad_state);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    } else {
      RenderPassId delegated_contributing_render_pass_id =
          RenderPassDrawQuad::MaterialCast(delegated_quad)->render_pass_id;
      RenderPassId output_contributing_render_pass_id;

      bool present =
          ConvertDelegatedRenderPassId(delegated_contributing_render_pass_id,
                                       &output_contributing_render_pass_id);
      // |present| being false means the child compositor sent an invalid
      // frame.
      DCHECK(present);
      DCHECK(output_contributing_render_pass_id != render_pass->id);

      RenderPassDrawQuad* output_quad =
          render_pass->CopyFromAndAppendRenderPassDrawQuad(
              RenderPassDrawQuad::MaterialCast(delegated_quad),
              output_shared_quad_state, output_contributing_render_pass_id);
      output_quad->visible_rect = quad_visible_rect;
      ValidateQuadResources(output_quad);
    }
  }
}
const char* DelegatedRendererLayerImpl::LayerTypeAsString() const {
  return "cc::DelegatedRendererLayerImpl";
}
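
// Destroys the ResourceProvider child entry if this layer owns it, and resets
// the child id and tracked resources.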
void DelegatedRendererLayerImpl::ClearChildId() {
  if (!child_id_)
    return;

  if (own_child_id_) {
    ResourceProvider* provider = layer_tree_impl()->resource_provider();
    provider->DestroyChild(child_id_);
  }

  resources_.clear();
  child_id_ = 0;
}
}  // namespace cc