[chromium-blink-merge.git] / media / capture / video / fake_video_capture_device.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/capture/video/fake_video_capture_device.h"

#include <algorithm>

#include "base/bind.h"
#include "base/strings/stringprintf.h"
#include "media/audio/fake_audio_input_stream.h"
#include "media/base/video_frame.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"

namespace media {

static const int kFakeCaptureBeepCycle = 10;  // Visual beep every 0.5s.
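// kFakeCapturePeriodMs is defined outside this file; assuming the usual
// ~50 ms capture period, the beep cadence works out to
// 10 frames * 50 ms = 500 ms, which is where the "every 0.5s" above comes
// from.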

void DrawPacman(bool use_argb,
                uint8_t* const data,
                int frame_count,
                int frame_interval,
                const gfx::Size& frame_size) {
  // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format.
  const SkColorType colorspace =
      use_argb ? kN32_SkColorType : kAlpha_8_SkColorType;
  const SkImageInfo info = SkImageInfo::Make(
      frame_size.width(), frame_size.height(), colorspace, kOpaque_SkAlphaType);
  SkBitmap bitmap;
  bitmap.setInfo(info);
  bitmap.setPixels(data);
  SkPaint paint;
  paint.setStyle(SkPaint::kFill_Style);
  SkCanvas canvas(bitmap);

  // Equalize Alpha_8, which has a light green background, while RGBA has
  // white.
  if (use_argb) {
    const SkRect full_frame =
        SkRect::MakeWH(frame_size.width(), frame_size.height());
    paint.setARGB(255, 0, 127, 0);
    canvas.drawRect(full_frame, paint);
  }
  paint.setColor(SK_ColorGREEN);

  // Draw a sweeping circle to show an animation.
  const int end_angle = (3 * kFakeCaptureBeepCycle * frame_count % 361);
  const int radius = std::min(frame_size.width(), frame_size.height()) / 4;
  const SkRect rect = SkRect::MakeXYWH(frame_size.width() / 2 - radius,
                                       frame_size.height() / 2 - radius,
                                       2 * radius, 2 * radius);
  canvas.drawArc(rect, 0, end_angle, true, paint);
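  // The sweep advances 3 * kFakeCaptureBeepCycle = 30 degrees per frame, so
  // the arc completes a full revolution roughly every 12 frames before
  // wrapping at 361.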

  // Draw current time.
  const int elapsed_ms = frame_interval * frame_count;
  const int milliseconds = elapsed_ms % 1000;
  const int seconds = (elapsed_ms / 1000) % 60;
  const int minutes = (elapsed_ms / 1000 / 60) % 60;
  const int hours = (elapsed_ms / 1000 / 60 / 60) % 60;

  const std::string time_string =
      base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
                         milliseconds, frame_count);
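  // For example, at frame_count = 100 with a hypothetical 50 ms
  // frame_interval, elapsed_ms = 5000 and the overlay reads "0:00:05:000 100".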
  canvas.scale(3, 3);
  canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
}

FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership,
                                               BufferPlanarity planarity)
    : buffer_ownership_(buffer_ownership),
      planarity_(planarity),
      frame_count_(0),
      weak_factory_(this) {}

FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void FakeVideoCaptureDevice::AllocateAndStart(
    const VideoCaptureParams& params,
    scoped_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ = client.Pass();

  // Incoming |params| may not match any of the supported formats, so we pick
  // the closest one, rounded up. TODO(mcasas): Use |params| as-is when it is
  // one of the supported formats, once http://crbug.com/309554 is verified.
  capture_format_.frame_rate = 30.0;
  if (params.requested_format.frame_size.width() > 1280)
    capture_format_.frame_size.SetSize(1920, 1080);
  else if (params.requested_format.frame_size.width() > 640)
    capture_format_.frame_size.SetSize(1280, 720);
  else if (params.requested_format.frame_size.width() > 320)
    capture_format_.frame_size.SetSize(640, 480);
  else
    capture_format_.frame_size.SetSize(320, 240);
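  // For example, a requested 800x600 falls in the (640, 1280] width bucket and
  // is served as 1280x720, while a 160x120 request gets the 320x240 minimum.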

  if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) {
    if (planarity_ == BufferPlanarity::PACKED) {
      capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
      capture_format_.pixel_format = PIXEL_FORMAT_ARGB;
      DVLOG(1) << "starting with client argb buffers";
    } else if (planarity_ == BufferPlanarity::TRIPLANAR) {
      capture_format_.pixel_storage = PIXEL_STORAGE_GPUMEMORYBUFFER;
      capture_format_.pixel_format = PIXEL_FORMAT_I420;
      DVLOG(1) << "starting with gmb I420 buffers";
    }
  } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) {
    capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
    capture_format_.pixel_format = PIXEL_FORMAT_I420;
    DVLOG(1) << "starting with own I420 buffers";
  }

  if (capture_format_.pixel_format == PIXEL_FORMAT_I420) {
    fake_frame_.reset(new uint8[VideoFrame::AllocationSize(
        PIXEL_FORMAT_I420, capture_format_.frame_size)]);
  }
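  // VideoFrame::AllocationSize() for I420 is roughly width * height * 3 / 2
  // bytes (a full-resolution Y plane plus quarter-resolution U and V planes),
  // e.g. about 640 * 480 * 3 / 2 = 460800 bytes for the default VGA size.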

  if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS)
    BeepAndScheduleNextCapture(
        base::TimeTicks::Now(),
        base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers,
                   weak_factory_.GetWeakPtr()));
  else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS)
    BeepAndScheduleNextCapture(
        base::TimeTicks::Now(),
        base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers,
                   weak_factory_.GetWeakPtr()));
}

void FakeVideoCaptureDevice::StopAndDeAllocate() {
  DCHECK(thread_checker_.CalledOnValidThread());
  client_.reset();
}
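
// Illustrative call sequence (a sketch, not part of the original file),
// assuming the BufferOwnership/BufferPlanarity enums nested in
// FakeVideoCaptureDevice and a caller-provided |params| and |client|:
//
//   scoped_ptr<FakeVideoCaptureDevice> device(new FakeVideoCaptureDevice(
//       FakeVideoCaptureDevice::BufferOwnership::OWN_BUFFERS,
//       FakeVideoCaptureDevice::BufferPlanarity::PACKED));
//   device->AllocateAndStart(params, client.Pass());
//   // ... frames are delivered to |client| every kFakeCapturePeriodMs ...
//   device->StopAndDeAllocate();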

void FakeVideoCaptureDevice::CaptureUsingOwnBuffers(
    base::TimeTicks expected_execution_time) {
  DCHECK(thread_checker_.CalledOnValidThread());
  const size_t frame_size = capture_format_.ImageAllocationSize();
  memset(fake_frame_.get(), 0, frame_size);

  DrawPacman(false /* use_argb */, fake_frame_.get(), frame_count_,
             kFakeCapturePeriodMs, capture_format_.frame_size);

  // Give the captured frame to the client.
  if (planarity_ == BufferPlanarity::PACKED) {
    client_->OnIncomingCapturedData(fake_frame_.get(), frame_size,
                                    capture_format_, 0 /* rotation */,
                                    base::TimeTicks::Now());
  } else if (planarity_ == BufferPlanarity::TRIPLANAR) {
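    // The three pointers below assume the standard I420 layout inside
    // |fake_frame_|: Y occupies the first width * height bytes, U the next
    // quarter and V the last quarter, so U starts at GetArea() and V at
    // GetArea() * 5 / 4. For 640x480 that is offsets 0, 307200 and 384000,
    // with strides 640, 320 and 320.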
    client_->OnIncomingCapturedYuvData(
        fake_frame_.get(),
        fake_frame_.get() + capture_format_.frame_size.GetArea(),
        fake_frame_.get() + capture_format_.frame_size.GetArea() * 5 / 4,
        capture_format_.frame_size.width(),
        capture_format_.frame_size.width() / 2,
        capture_format_.frame_size.width() / 2, capture_format_,
        0 /* rotation */, base::TimeTicks::Now());
  }
  BeepAndScheduleNextCapture(
      expected_execution_time,
      base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers,
                 weak_factory_.GetWeakPtr()));
}

void FakeVideoCaptureDevice::CaptureUsingClientBuffers(
    base::TimeTicks expected_execution_time) {
  DCHECK(thread_checker_.CalledOnValidThread());

  scoped_ptr<VideoCaptureDevice::Client::Buffer> capture_buffer(
      client_->ReserveOutputBuffer(capture_format_.frame_size,
                                   capture_format_.pixel_format,
                                   capture_format_.pixel_storage));
  DLOG_IF(ERROR, !capture_buffer) << "Couldn't allocate Capture Buffer";
  DCHECK(capture_buffer->data()) << "Buffer has NO backing memory";

  if (capture_format_.pixel_storage == PIXEL_STORAGE_GPUMEMORYBUFFER &&
      capture_format_.pixel_format == media::PIXEL_FORMAT_I420) {
    // Since SkBitmap expects a packed & contiguous memory region for I420, we
    // need to use |fake_frame_| to draw onto.
    memset(fake_frame_.get(), 0, capture_format_.ImageAllocationSize());
    DrawPacman(false /* use_argb */, fake_frame_.get(), frame_count_,
               kFakeCapturePeriodMs, capture_format_.frame_size);

    // Copy data from |fake_frame_| into the reserved planes of GpuMemoryBuffer.
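    // PlaneSize().GetArea() gives each plane's byte size; e.g. for 640x480
    // I420 the loop below copies 307200 (Y), 76800 (U) and 76800 (V) bytes,
    // advancing |offset| through |fake_frame_|.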
    size_t offset = 0;
    for (size_t i = 0; i < VideoFrame::NumPlanes(PIXEL_FORMAT_I420); ++i) {
      const size_t plane_size =
          VideoFrame::PlaneSize(PIXEL_FORMAT_I420, i,
                                capture_format_.frame_size)
              .GetArea();
      memcpy(capture_buffer->data(i), fake_frame_.get() + offset, plane_size);
      offset += plane_size;
    }
  } else {
    DCHECK_EQ(capture_format_.pixel_storage, PIXEL_STORAGE_CPU);
    DCHECK_EQ(capture_format_.pixel_format, PIXEL_FORMAT_ARGB);
    uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer->data());
    memset(data_ptr, 0, capture_buffer->mapped_size());
    DrawPacman(true /* use_argb */, data_ptr, frame_count_,
               kFakeCapturePeriodMs, capture_format_.frame_size);
  }

  // Give the captured frame to the client.
  client_->OnIncomingCapturedBuffer(capture_buffer.Pass(), capture_format_,
                                    base::TimeTicks::Now());

  BeepAndScheduleNextCapture(
      expected_execution_time,
      base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers,
                 weak_factory_.GetWeakPtr()));
}

void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
    base::TimeTicks expected_execution_time,
    const base::Callback<void(base::TimeTicks)>& next_capture) {
  // Generate a synchronized beep sound every so many frames.
  if (frame_count_++ % kFakeCaptureBeepCycle == 0)
    FakeAudioInputStream::BeepOnce();

  // Reschedule next CaptureTask.
  const base::TimeTicks current_time = base::TimeTicks::Now();
  const base::TimeDelta frame_interval =
      base::TimeDelta::FromMilliseconds(kFakeCapturePeriodMs);
  // Don't accumulate any debt if we are lagging behind - just post the next
  // frame immediately and continue as normal.
  const base::TimeTicks next_execution_time =
      std::max(current_time, expected_execution_time + frame_interval);
  const base::TimeDelta delay = next_execution_time - current_time;
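  // For example, with a 50 ms period: if the previous frame was expected at
  // t = 100 ms and it is now t = 110 ms, the next frame is scheduled for
  // t = 150 ms (a 40 ms delay); if we are already past t = 150 ms, the max()
  // above picks |current_time| and the task is posted with zero delay.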
  base::MessageLoop::current()->PostDelayedTask(
      FROM_HERE, base::Bind(next_capture, next_execution_time), delay);
}

}  // namespace media