remoting/client/software_video_renderer.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/client/software_video_renderer.h"

#include <list>

#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "remoting/base/util.h"
#include "remoting/client/frame_consumer.h"
#include "remoting/codec/video_decoder.h"
#include "remoting/codec/video_decoder_verbatim.h"
#include "remoting/codec/video_decoder_vpx.h"
#include "remoting/protocol/session_config.h"
#include "third_party/libyuv/include/libyuv/convert_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"

using base::Passed;
using remoting::protocol::ChannelConfig;
using remoting::protocol::SessionConfig;

namespace remoting {

// This class wraps a VideoDecoder and byte-swaps the pixels for compatibility
// with the android.graphics.Bitmap class.
// TODO(lambroslambrou): Refactor so that the VideoDecoder produces data
// in the right byte-order, instead of swapping it here.
class RgbToBgrVideoDecoderFilter : public VideoDecoder {
 public:
  RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent)
      : parent_(parent.Pass()) {
  }

  void Initialize(const webrtc::DesktopSize& screen_size) override {
    parent_->Initialize(screen_size);
  }

  bool DecodePacket(const VideoPacket& packet) override {
    return parent_->DecodePacket(packet);
  }

  void Invalidate(const webrtc::DesktopSize& view_size,
                  const webrtc::DesktopRegion& region) override {
    return parent_->Invalidate(view_size, region);
  }

  void RenderFrame(const webrtc::DesktopSize& view_size,
                   const webrtc::DesktopRect& clip_area,
                   uint8* image_buffer,
                   int image_stride,
                   webrtc::DesktopRegion* output_region) override {
    parent_->RenderFrame(view_size, clip_area, image_buffer, image_stride,
                         output_region);

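    // Swap the red and blue channels of every updated rectangle in place.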
    for (webrtc::DesktopRegion::Iterator i(*output_region); !i.IsAtEnd();
         i.Advance()) {
      webrtc::DesktopRect rect = i.rect();
      uint8* pixels = image_buffer + (rect.top() * image_stride) +
                      (rect.left() * kBytesPerPixel);
      libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride,
                         rect.width(), rect.height());
    }
  }

  const webrtc::DesktopRegion* GetImageShape() override {
    return parent_->GetImageShape();
  }

 private:
  scoped_ptr<VideoDecoder> parent_;
};

class SoftwareVideoRenderer::Core {
 public:
  Core(scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
       scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
       scoped_refptr<FrameConsumerProxy> consumer);
  ~Core();

  void OnSessionConfig(const protocol::SessionConfig& config);
  void DrawBuffer(webrtc::DesktopFrame* buffer);
  void InvalidateRegion(const webrtc::DesktopRegion& region);
  void RequestReturnBuffers(const base::Closure& done);
  void SetOutputSizeAndClip(
      const webrtc::DesktopSize& view_size,
      const webrtc::DesktopRect& clip_area);

  // Decodes the contents of |packet|. DecodePacket may keep a reference to
  // |packet| so the |packet| must remain alive and valid until |done| is
  // executed.
  void DecodePacket(scoped_ptr<VideoPacket> packet, const base::Closure& done);

 private:
  // Paints the invalidated region to the next available buffer and returns it
  // to the consumer.
  void SchedulePaint();
  void DoPaint();

  scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
  scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner_;
  scoped_refptr<FrameConsumerProxy> consumer_;
  scoped_ptr<VideoDecoder> decoder_;

  // Remote screen size in pixels.
  webrtc::DesktopSize source_size_;

  // Vertical and horizontal DPI of the remote screen.
  webrtc::DesktopVector source_dpi_;

  // The current dimensions of the frame consumer view.
  webrtc::DesktopSize view_size_;
  webrtc::DesktopRect clip_area_;

  // The drawing buffers supplied by the frame consumer.
  std::list<webrtc::DesktopFrame*> buffers_;

  // Flag used to coalesce runs of SchedulePaint()s into a single DoPaint().
  bool paint_scheduled_;

  base::WeakPtrFactory<Core> weak_factory_;
};

SoftwareVideoRenderer::Core::Core(
    scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
    scoped_refptr<FrameConsumerProxy> consumer)
    : main_task_runner_(main_task_runner),
      decode_task_runner_(decode_task_runner),
      consumer_(consumer),
      paint_scheduled_(false),
      weak_factory_(this) {
}

SoftwareVideoRenderer::Core::~Core() {
}

void SoftwareVideoRenderer::Core::OnSessionConfig(const SessionConfig& config) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  // Initialize decoder based on the selected codec.
  ChannelConfig::Codec codec = config.video_config().codec;
  if (codec == ChannelConfig::CODEC_VERBATIM) {
    decoder_.reset(new VideoDecoderVerbatim());
  } else if (codec == ChannelConfig::CODEC_VP8) {
    decoder_ = VideoDecoderVpx::CreateForVP8();
  } else if (codec == ChannelConfig::CODEC_VP9) {
    decoder_ = VideoDecoderVpx::CreateForVP9();
  } else {
    NOTREACHED() << "Invalid Encoding found: " << codec;
  }

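  // If the consumer expects RGBA pixels, wrap the decoder in the
  // byte-swapping filter defined above.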
  if (consumer_->GetPixelFormat() == FrameConsumer::FORMAT_RGBA) {
    scoped_ptr<VideoDecoder> wrapper(
        new RgbToBgrVideoDecoderFilter(decoder_.Pass()));
    decoder_ = wrapper.Pass();
  }
}

void SoftwareVideoRenderer::Core::DecodePacket(scoped_ptr<VideoPacket> packet,
                                               const base::Closure& done) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  bool decoder_needs_reset = false;
  bool notify_size_or_dpi_change = false;

  // If the packet includes screen size or DPI information, store them.
  if (packet->format().has_screen_width() &&
      packet->format().has_screen_height()) {
    webrtc::DesktopSize source_size(packet->format().screen_width(),
                                    packet->format().screen_height());
    if (!source_size_.equals(source_size)) {
      source_size_ = source_size;
      decoder_needs_reset = true;
      notify_size_or_dpi_change = true;
    }
  }
  if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) {
    webrtc::DesktopVector source_dpi(packet->format().x_dpi(),
                                     packet->format().y_dpi());
    if (!source_dpi.equals(source_dpi_)) {
      source_dpi_ = source_dpi;
      notify_size_or_dpi_change = true;
    }
  }

  // If we've never seen a screen size, ignore the packet.
  if (source_size_.is_empty()) {
    main_task_runner_->PostTask(FROM_HERE, base::Bind(done));
    return;
  }

  if (decoder_needs_reset)
    decoder_->Initialize(source_size_);
  if (notify_size_or_dpi_change)
    consumer_->SetSourceSize(source_size_, source_dpi_);

  if (decoder_->DecodePacket(*packet.get())) {
    SchedulePaint();
  } else {
    LOG(ERROR) << "DecodePacket() failed.";
  }

  main_task_runner_->PostTask(FROM_HERE, base::Bind(done));
}

void SoftwareVideoRenderer::Core::SchedulePaint() {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());
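  // Coalesce repeated requests: at most one DoPaint() task is pending at a
  // time, and |paint_scheduled_| is cleared when DoPaint() runs.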
  if (paint_scheduled_)
    return;
  paint_scheduled_ = true;
  decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DoPaint,
                            weak_factory_.GetWeakPtr()));
}

void SoftwareVideoRenderer::Core::DoPaint() {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());
  DCHECK(paint_scheduled_);
  paint_scheduled_ = false;

  // If the view size is empty or we have no output buffers ready, return.
  if (buffers_.empty() || view_size_.is_empty())
    return;

  // If no Decoder is initialized, or the host dimensions are empty, return.
  if (!decoder_.get() || source_size_.is_empty())
    return;

  // Draw the invalidated region to the buffer.
  webrtc::DesktopFrame* buffer = buffers_.front();
  webrtc::DesktopRegion output_region;
  decoder_->RenderFrame(view_size_, clip_area_,
                        buffer->data(), buffer->stride(), &output_region);

  // Notify the consumer that painting is done.
  if (!output_region.is_empty()) {
    buffers_.pop_front();
    consumer_->ApplyBuffer(view_size_, clip_area_, buffer, output_region,
                           *decoder_->GetImageShape());
  }
}

void SoftwareVideoRenderer::Core::RequestReturnBuffers(
    const base::Closure& done) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  while (!buffers_.empty()) {
    consumer_->ReturnBuffer(buffers_.front());
    buffers_.pop_front();
  }

  if (!done.is_null())
    done.Run();
}

void SoftwareVideoRenderer::Core::DrawBuffer(webrtc::DesktopFrame* buffer) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());
  DCHECK(clip_area_.width() <= buffer->size().width() &&
         clip_area_.height() <= buffer->size().height());

  buffers_.push_back(buffer);
  SchedulePaint();
}

void SoftwareVideoRenderer::Core::InvalidateRegion(
    const webrtc::DesktopRegion& region) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  if (decoder_.get()) {
    decoder_->Invalidate(view_size_, region);
    SchedulePaint();
  }
}

void SoftwareVideoRenderer::Core::SetOutputSizeAndClip(
    const webrtc::DesktopSize& view_size,
    const webrtc::DesktopRect& clip_area) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  // The whole frame needs to be repainted if the scaling factor has changed.
  if (!view_size_.equals(view_size) && decoder_.get()) {
    webrtc::DesktopRegion region;
    region.AddRect(webrtc::DesktopRect::MakeSize(view_size));
    decoder_->Invalidate(view_size, region);
  }

  if (!view_size_.equals(view_size) ||
      !clip_area_.equals(clip_area)) {
    view_size_ = view_size;
    clip_area_ = clip_area;

    // Return buffers that are smaller than needed to the consumer for
    // reuse/reallocation.
    std::list<webrtc::DesktopFrame*>::iterator i = buffers_.begin();
    while (i != buffers_.end()) {
      if ((*i)->size().width() < clip_area_.width() ||
          (*i)->size().height() < clip_area_.height()) {
        consumer_->ReturnBuffer(*i);
        i = buffers_.erase(i);
      } else {
        ++i;
      }
    }

    SchedulePaint();
  }
}

SoftwareVideoRenderer::SoftwareVideoRenderer(
    scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
    scoped_refptr<FrameConsumerProxy> consumer)
    : decode_task_runner_(decode_task_runner),
      core_(new Core(main_task_runner, decode_task_runner, consumer)),
      latest_event_timestamp_(0),
      weak_factory_(this) {
  DCHECK(CalledOnValidThread());
}

SoftwareVideoRenderer::~SoftwareVideoRenderer() {
  DCHECK(CalledOnValidThread());
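  // |core_| is used on the decode thread, so it must be destroyed there too.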
  decode_task_runner_->DeleteSoon(FROM_HERE, core_.release());
}

void SoftwareVideoRenderer::OnSessionConfig(
    const protocol::SessionConfig& config) {
  DCHECK(CalledOnValidThread());
  decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::OnSessionConfig,
                            base::Unretained(core_.get()), config));
}

ChromotingStats* SoftwareVideoRenderer::GetStats() {
  DCHECK(CalledOnValidThread());
  return &stats_;
}

void SoftwareVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
                                               const base::Closure& done) {
  DCHECK(CalledOnValidThread());

  // If the video packet is empty then drop it. Empty packets are used to
  // maintain activity on the network.
  if (!packet->has_data() || packet->data().size() == 0) {
    done.Run();
    return;
  }

  // Add one frame to the counter.
  stats_.video_frame_rate()->Record(1);

  // Record other statistics received from host.
  stats_.video_bandwidth()->Record(packet->data().size());
  if (packet->has_capture_time_ms())
    stats_.video_capture_ms()->Record(packet->capture_time_ms());
  if (packet->has_encode_time_ms())
    stats_.video_encode_ms()->Record(packet->encode_time_ms());
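  // |latest_event_timestamp| echoes the client-side timestamp of the most
  // recent input event the host has processed, so the difference from the
  // current time approximates the input round-trip latency.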
  if (packet->has_latest_event_timestamp() &&
      packet->latest_event_timestamp() > latest_event_timestamp_) {
    latest_event_timestamp_ = packet->latest_event_timestamp();
    base::TimeDelta round_trip_latency =
        base::Time::Now() -
        base::Time::FromInternalValue(packet->latest_event_timestamp());
    stats_.round_trip_ms()->Record(round_trip_latency.InMilliseconds());
  }

  // Measure the latency between the last packet being received and presented.
  base::Time decode_start = base::Time::Now();

  base::Closure decode_done = base::Bind(&SoftwareVideoRenderer::OnPacketDone,
                                         weak_factory_.GetWeakPtr(),
                                         decode_start, done);

  decode_task_runner_->PostTask(FROM_HERE, base::Bind(
      &SoftwareVideoRenderer::Core::DecodePacket,
      base::Unretained(core_.get()), base::Passed(&packet), decode_done));
}

void SoftwareVideoRenderer::DrawBuffer(webrtc::DesktopFrame* buffer) {
  decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DrawBuffer,
                            base::Unretained(core_.get()), buffer));
}

void SoftwareVideoRenderer::InvalidateRegion(
    const webrtc::DesktopRegion& region) {
  decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::InvalidateRegion,
                            base::Unretained(core_.get()), region));
}

void SoftwareVideoRenderer::RequestReturnBuffers(const base::Closure& done) {
  decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&SoftwareVideoRenderer::Core::RequestReturnBuffers,
                 base::Unretained(core_.get()), done));
}

void SoftwareVideoRenderer::SetOutputSizeAndClip(
    const webrtc::DesktopSize& view_size,
    const webrtc::DesktopRect& clip_area) {
  decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&SoftwareVideoRenderer::Core::SetOutputSizeAndClip,
                 base::Unretained(core_.get()), view_size, clip_area));
}

void SoftwareVideoRenderer::OnPacketDone(base::Time decode_start,
                                         const base::Closure& done) {
  DCHECK(CalledOnValidThread());

  // Record the latency between the packet being received and presented.
  stats_.video_decode_ms()->Record(
      (base::Time::Now() - decode_start).InMilliseconds());

  done.Run();
}

}  // namespace remoting