// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/codec/video_decoder_vpx.h"

#include <math.h>

#include <algorithm>

#include "base/logging.h"
#include "media/base/media.h"
#include "media/base/yuv_convert.h"
#include "remoting/base/util.h"
#include "third_party/libyuv/include/libyuv/convert_argb.h"

extern "C" {
#define VPX_CODEC_DISABLE_COMPAT 1
#include "third_party/libvpx/source/libvpx/vpx/vpx_decoder.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8dx.h"
}

namespace remoting {

namespace {

const uint32 kTransparentColor = 0;

// Fills the rectangle |rect| with the given ARGB color |color| in |buffer|.
void FillRect(uint8* buffer,
              int stride,
              const webrtc::DesktopRect& rect,
              uint32 color) {
  uint32* ptr = reinterpret_cast<uint32*>(buffer + (rect.top() * stride) +
      (rect.left() * VideoDecoder::kBytesPerPixel));
  int width = rect.width();
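  // Note: |stride| is in bytes, so each row advance below steps the uint32
  // pointer by stride / kBytesPerPixel pixels rather than by |stride| itself.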
  for (int height = rect.height(); height > 0; --height) {
    std::fill(ptr, ptr + width, color);
    ptr += stride / VideoDecoder::kBytesPerPixel;
  }
}

}  // namespace

// static
scoped_ptr<VideoDecoderVpx> VideoDecoderVpx::CreateForVP8() {
  ScopedVpxCodec codec(new vpx_codec_ctx_t);

  // TODO(hclam): Scale the number of threads with number of cores of the
  // machine.
  vpx_codec_dec_cfg config;
  config.w = 0;
  config.h = 0;
  config.threads = 2;
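  // Leaving |w| and |h| at zero means we don't pre-size the decoder; libvpx
  // picks the frame dimensions up from the stream. The thread count is a
  // fixed guess for now (see the TODO above).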
  vpx_codec_err_t ret =
      vpx_codec_dec_init(codec.get(), vpx_codec_vp8_dx(), &config, 0);
  if (ret != VPX_CODEC_OK) {
    LOG(ERROR) << "Cannot initialize codec.";
    return nullptr;
  }

  return make_scoped_ptr(new VideoDecoderVpx(codec.Pass()));
}

// static
scoped_ptr<VideoDecoderVpx> VideoDecoderVpx::CreateForVP9() {
  ScopedVpxCodec codec(new vpx_codec_ctx_t);

  // TODO(hclam): Scale the number of threads with number of cores of the
  // machine.
  vpx_codec_dec_cfg config;
  config.w = 0;
  config.h = 0;
  config.threads = 2;
  vpx_codec_err_t ret =
      vpx_codec_dec_init(codec.get(), vpx_codec_vp9_dx(), &config, 0);
  if (ret != VPX_CODEC_OK) {
    LOG(ERROR) << "Cannot initialize codec.";
    return nullptr;
  }

  return make_scoped_ptr(new VideoDecoderVpx(codec.Pass()));
}
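
// Rough usage sketch (illustrative only; the real call sites live in the
// client rendering code, and |packet|, |view_size|, |clip_area|, |buffer| and
// |stride| stand in for caller-supplied values):
//
//   scoped_ptr<VideoDecoderVpx> decoder = VideoDecoderVpx::CreateForVP8();
//   decoder->Initialize(screen_size);
//   if (decoder->DecodePacket(packet)) {
//     webrtc::DesktopRegion dirty;
//     decoder->RenderFrame(view_size, clip_area, buffer, stride, &dirty);
//   }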

VideoDecoderVpx::~VideoDecoderVpx() {}

void VideoDecoderVpx::Initialize(const webrtc::DesktopSize& screen_size) {
  DCHECK(!screen_size.is_empty());

  screen_size_ = screen_size;
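
  // Treat the whole screen as transparent until the first frame and desktop
  // shape have been received and decoded.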
  transparent_region_.SetRect(webrtc::DesktopRect::MakeSize(screen_size_));
}

bool VideoDecoderVpx::DecodePacket(const VideoPacket& packet) {
  DCHECK(!screen_size_.is_empty());

  // Do the actual decoding.
  vpx_codec_err_t ret = vpx_codec_decode(
      codec_.get(), reinterpret_cast<const uint8*>(packet.data().data()),
      packet.data().size(), NULL, 0);
  if (ret != VPX_CODEC_OK) {
    const char* error = vpx_codec_error(codec_.get());
    const char* error_detail = vpx_codec_error_detail(codec_.get());
    LOG(ERROR) << "Decoding failed: " << (error ? error : "(NULL)") << "\n"
               << "Details: " << (error_detail ? error_detail : "(NULL)");
    return false;
  }

  // Get the decoded data.
  vpx_codec_iter_t iter = NULL;
  vpx_image_t* image = vpx_codec_get_frame(codec_.get(), &iter);
  if (!image) {
    LOG(ERROR) << "No video frame decoded";
    return false;
  }
  last_image_ = image;
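  // |image| is owned by the codec and should remain valid until the next
  // vpx_codec_decode() call, so caching the raw pointer here is safe.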

  webrtc::DesktopRegion region;
  for (int i = 0; i < packet.dirty_rects_size(); ++i) {
    Rect remoting_rect = packet.dirty_rects(i);
    region.AddRect(webrtc::DesktopRect::MakeXYWH(
        remoting_rect.x(), remoting_rect.y(),
        remoting_rect.width(), remoting_rect.height()));
  }

  updated_region_.AddRegion(region);
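  // Dirty rectangles accumulate in |updated_region_| across packets and are
  // only consumed (and subtracted) when RenderFrame() paints them.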

  // Update the desktop shape region.
  webrtc::DesktopRegion desktop_shape_region;
  if (packet.has_use_desktop_shape()) {
    for (int i = 0; i < packet.desktop_shape_rects_size(); ++i) {
      Rect remoting_rect = packet.desktop_shape_rects(i);
      desktop_shape_region.AddRect(webrtc::DesktopRect::MakeXYWH(
          remoting_rect.x(), remoting_rect.y(),
          remoting_rect.width(), remoting_rect.height()));
    }
  } else {
    // Fallback for the case when the host didn't include the desktop shape
    // region.
    desktop_shape_region =
        webrtc::DesktopRegion(webrtc::DesktopRect::MakeSize(screen_size_));
  }

  UpdateImageShapeRegion(&desktop_shape_region);

  return true;
}

void VideoDecoderVpx::Invalidate(const webrtc::DesktopSize& view_size,
                                 const webrtc::DesktopRegion& region) {
  DCHECK(!view_size.is_empty());
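
  // |region| is given in view coordinates; scale each rectangle back into
  // source (screen) coordinates before accumulating it.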
  for (webrtc::DesktopRegion::Iterator i(region); !i.IsAtEnd(); i.Advance()) {
    updated_region_.AddRect(ScaleRect(i.rect(), view_size, screen_size_));
  }

  // Updated areas outside of the desktop shape region should be made
  // transparent, not repainted.
  webrtc::DesktopRegion difference = updated_region_;
  difference.Subtract(desktop_shape_);
  updated_region_.Subtract(difference);
  transparent_region_.AddRegion(difference);
}

void VideoDecoderVpx::RenderFrame(const webrtc::DesktopSize& view_size,
                                  const webrtc::DesktopRect& clip_area,
                                  uint8* image_buffer,
                                  int image_stride,
                                  webrtc::DesktopRegion* output_region) {
  DCHECK(!screen_size_.is_empty());
  DCHECK(!view_size.is_empty());

  // Early-return and do nothing if we haven't yet decoded any frames.
  if (!last_image_)
    return;

  webrtc::DesktopRect source_clip =
      webrtc::DesktopRect::MakeWH(last_image_->d_w, last_image_->d_h);

  // VP8 only outputs I420 frames, but VP9 can also produce I444.
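  // In all of the paths below nothing outside |clip_area| is written, and
  // every rectangle that gets painted is reported through |output_region|.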
  switch (last_image_->fmt) {
    case VPX_IMG_FMT_I444: {
      // TODO(wez): Add scaling support to the I444 conversion path.
      if (view_size.equals(screen_size_)) {
        for (webrtc::DesktopRegion::Iterator i(updated_region_);
             !i.IsAtEnd(); i.Advance()) {
          // Determine the area affected by this rectangle changing.
          webrtc::DesktopRect rect = i.rect();
          rect.IntersectWith(source_clip);
          rect.IntersectWith(clip_area);
          if (rect.is_empty())
            continue;

          int image_offset = image_stride * rect.top() +
                             rect.left() * VideoDecoder::kBytesPerPixel;
          int y_offset = last_image_->stride[0] * rect.top() + rect.left();
          int u_offset = last_image_->stride[1] * rect.top() + rect.left();
          int v_offset = last_image_->stride[2] * rect.top() + rect.left();
          libyuv::I444ToARGB(last_image_->planes[0] + y_offset,
                             last_image_->stride[0],
                             last_image_->planes[1] + u_offset,
                             last_image_->stride[1],
                             last_image_->planes[2] + v_offset,
                             last_image_->stride[2],
                             image_buffer + image_offset, image_stride,
                             rect.width(), rect.height());

          output_region->AddRect(rect);
        }
      }
      break;
    }
    case VPX_IMG_FMT_I420: {
      // ScaleYUVToRGB32WithRect does not currently support up-scaling. We
      // won't be asked to up-scale except during resizes or if page zoom is
      // >100%, so we work around the limitation by using the slower
      // ScaleYUVToRGB32.
      // TODO(wez): Remove this hack if/when ScaleYUVToRGB32WithRect can
      // up-scale.
      if (!updated_region_.is_empty() &&
          (source_clip.width() < view_size.width() ||
           source_clip.height() < view_size.height())) {
        // We're scaling only |clip_area| into the |image_buffer|, so we need to
        // work out which source rectangle that corresponds to.
        webrtc::DesktopRect source_rect =
            ScaleRect(clip_area, view_size, screen_size_);
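        // Round the left/top of |source_rect| down to even values to keep the
        // chroma planes aligned: I420 subsamples U and V 2x2, so an odd luma
        // coordinate would not map onto a whole chroma sample.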
        source_rect = webrtc::DesktopRect::MakeLTRB(
            RoundToTwosMultiple(source_rect.left()),
            RoundToTwosMultiple(source_rect.top()),
            source_rect.right(),
            source_rect.bottom());

        // If there were no changes within the clip source area then don't
        // render.
        webrtc::DesktopRegion intersection(source_rect);
        intersection.IntersectWith(updated_region_);
        if (intersection.is_empty())
          return;

        // Scale & convert the entire clip area.
        int y_offset = CalculateYOffset(source_rect.left(), source_rect.top(),
                                        last_image_->stride[0]);
        int uv_offset = CalculateUVOffset(source_rect.left(), source_rect.top(),
                                          last_image_->stride[1]);
        ScaleYUVToRGB32(last_image_->planes[0] + y_offset,
                        last_image_->planes[1] + uv_offset,
                        last_image_->planes[2] + uv_offset,
                        image_buffer,
                        source_rect.width(),
                        source_rect.height(),
                        clip_area.width(),
                        clip_area.height(),
                        last_image_->stride[0],
                        last_image_->stride[1],
                        image_stride,
                        media::YV12,
                        media::ROTATE_0,
                        media::FILTER_BILINEAR);
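
        // The entire clip area has been repainted, so report all of it as
        // dirty and drop the corresponding source area from the pending
        // update region.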
        output_region->AddRect(clip_area);
        updated_region_.Subtract(source_rect);
        return;
      }

      for (webrtc::DesktopRegion::Iterator i(updated_region_);
           !i.IsAtEnd(); i.Advance()) {
        // Determine the scaled area affected by this rectangle changing.
        webrtc::DesktopRect rect = i.rect();
        rect.IntersectWith(source_clip);
        if (rect.is_empty())
          continue;
        rect = ScaleRect(rect, screen_size_, view_size);
        rect.IntersectWith(clip_area);
        if (rect.is_empty())
          continue;

        ConvertAndScaleYUVToRGB32Rect(last_image_->planes[0],
                                      last_image_->planes[1],
                                      last_image_->planes[2],
                                      last_image_->stride[0],
                                      last_image_->stride[1],
                                      screen_size_,
                                      source_clip,
                                      image_buffer,
                                      image_stride,
                                      view_size,
                                      clip_area,
                                      rect);

        output_region->AddRect(rect);
      }

      updated_region_.Subtract(ScaleRect(clip_area, view_size, screen_size_));
      break;
    }
    default: {
      LOG(ERROR) << "Unsupported image format: " << last_image_->fmt;
      return;
    }
  }
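
  // Areas that fell outside the desktop shape were queued in
  // |transparent_region_|; clear them to transparent so stale pixels from the
  // previous shape are not left on screen.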
  for (webrtc::DesktopRegion::Iterator i(transparent_region_);
       !i.IsAtEnd(); i.Advance()) {
    // Determine the scaled area affected by this rectangle changing.
    webrtc::DesktopRect rect = i.rect();
    rect.IntersectWith(source_clip);
    if (rect.is_empty())
      continue;
    rect = ScaleRect(rect, screen_size_, view_size);
    rect.IntersectWith(clip_area);
    if (rect.is_empty())
      continue;

    // Fill the rectangle with transparent pixels.
    FillRect(image_buffer, image_stride, rect, kTransparentColor);
    output_region->AddRect(rect);
  }

  webrtc::DesktopRect scaled_clip_area =
      ScaleRect(clip_area, view_size, screen_size_);
  updated_region_.Subtract(scaled_clip_area);
  transparent_region_.Subtract(scaled_clip_area);
}

const webrtc::DesktopRegion* VideoDecoderVpx::GetImageShape() {
  return &desktop_shape_;
}

VideoDecoderVpx::VideoDecoderVpx(ScopedVpxCodec codec)
    : codec_(codec.Pass()),
      last_image_(NULL) {
  DCHECK(codec_);
}

void VideoDecoderVpx::UpdateImageShapeRegion(
    webrtc::DesktopRegion* new_desktop_shape) {
  // Add all areas that have been updated or become transparent to the
  // transparent region. Exclude anything within the new desktop shape.
  transparent_region_.AddRegion(desktop_shape_);
  transparent_region_.AddRegion(updated_region_);
  transparent_region_.Subtract(*new_desktop_shape);

  // Add newly exposed areas to the update region and limit updates to the new
  // desktop shape.
  webrtc::DesktopRegion difference = *new_desktop_shape;
  difference.Subtract(desktop_shape_);
  updated_region_.AddRegion(difference);
  updated_region_.IntersectWith(*new_desktop_shape);

  // Set the new desktop shape region.
  desktop_shape_.Swap(new_desktop_shape);
}

}  // namespace remoting