// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/callback.h"
#include "base/command_line.h"
#include "base/numerics/safe_conversions.h"
#include "base/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
#include "content/public/common/content_switches.h"
#include "media/base/bitstream_buffer.h"
#define NOTIFY_ERROR(x)                          \
  do {                                           \
    LOG(ERROR) << "Setting error state:" << x;   \
    SetErrorState(x);                            \
  } while (0)

#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value)               \
  do {                                                              \
    if (device_->Ioctl(type, arg) != 0) {                           \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type;   \
      NOTIFY_ERROR(kPlatformFailureError);                          \
      return value;                                                 \
    }                                                               \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))

#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)

#define IOCTL_OR_LOG_ERROR(type, arg)                               \
  do {                                                              \
    if (device_->Ioctl(type, arg) != 0)                             \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type;   \
  } while (0)
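
// The macros above funnel every ioctl() failure through a single error path.
// For example, IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps) issues the
// ioctl through device_->Ioctl() and, on failure, PLOGs the failing request
// name, calls SetErrorState(kPlatformFailureError) via NOTIFY_ERROR(), and
// returns false from the enclosing function; IOCTL_OR_LOG_ERROR() only logs.
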
namespace content {

struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};

V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}

V4L2VideoEncodeAccelerator::InputRecord::~InputRecord() {
}

V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false), address(NULL), length(0) {
}

V4L2VideoEncodeAccelerator::OutputRecord::~OutputRecord() {
}
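
// Threading model, as implemented below: the public entry points DCHECK that
// they run on child_task_runner_ (the thread that constructed this object),
// the actual encode work is posted to encoder_thread_, and blocking device
// polls run on device_poll_thread_, which posts ServiceDeviceTask() back onto
// encoder_thread_.
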
V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
    const scoped_refptr<V4L2Device>& device)
    : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      output_buffer_byte_size_(0),
      device_input_format_(media::VideoFrame::UNKNOWN),
      input_planes_count_(0),
      output_format_fourcc_(0),
      encoder_state_(kUninitialized),
      stream_header_size_(0),
      device_(device),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      input_memory_type_(V4L2_MEMORY_USERPTR),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      encoder_thread_("V4L2EncoderThread"),
      device_poll_thread_("V4L2EncoderDevicePollThread"),
      weak_this_ptr_factory_(this) {
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
}

V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() {
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());
  DVLOG(4) << __func__;

  DestroyInputBuffers();
  DestroyOutputBuffers();
}

bool V4L2VideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format input_format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DVLOG(3) << __func__ << ": input_format="
           << media::VideoFrame::FormatToString(input_format)
           << ", input_visible_size=" << input_visible_size.ToString()
           << ", output_profile=" << output_profile
           << ", initial_bitrate=" << initial_bitrate;

  visible_size_ = input_visible_size;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);

  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
                              V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
                  "caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetFormats(input_format, output_profile)) {
    LOG(ERROR) << "Failed setting up formats";
    return false;
  }

  if (input_format != device_input_format_) {
    DVLOG(1) << "Input format not supported by the HW, will convert to "
             << media::VideoFrame::FormatToString(device_input_format_);

    scoped_refptr<V4L2Device> device =
        V4L2Device::Create(V4L2Device::kImageProcessor);
    image_processor_.reset(new V4L2ImageProcessor(device));

    // Convert from input_format to device_input_format_, keeping the size
    // at visible_size_ and requiring the output buffers to be of at least
    // input_allocated_size_.
    if (!image_processor_->Initialize(
            input_format,
            device_input_format_,
            visible_size_,
            visible_size_,
            input_allocated_size_,
            base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
                       weak_this_))) {
      LOG(ERROR) << "Failed initializing image processor";
      return false;
    }
  }

  if (!InitControls())
    return false;

  if (!CreateOutputBuffers())
    return false;

  if (!encoder_thread_.Start()) {
    LOG(ERROR) << "Initialize(): encoder thread failed to start";
    return false;
  }

  RequestEncodingParametersChange(initial_bitrate, kInitialFramerate);

  encoder_state_ = kInitialized;

  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers, client_, kInputBufferCount,
                 image_processor_.get()
                     ? image_processor_->input_allocated_size()
                     : input_allocated_size_,
                 output_buffer_byte_size_));
  return true;
}
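
// State transitions, as used in this file: encoder_state_ moves from
// kUninitialized to kInitialized in Initialize(), from kInitialized to
// kEncoding in UseOutputBitstreamBufferTask() once the first output buffer
// arrives and device polling starts, and to kError on failure
// (SetErrorState()) or teardown (Destroy()/DestroyTask()).
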
void V4L2VideoEncodeAccelerator::ImageProcessorError() {
  LOG(ERROR) << "Image processor error";
  NOTIFY_ERROR(kPlatformFailureError);
}

void V4L2VideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (image_processor_) {
    image_processor_->Process(
        frame,
        base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed,
                   weak_this_,
                   force_keyframe));
  } else {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                   base::Unretained(this),
                   frame,
                   force_keyframe));
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id();
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError);
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(
          &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}

void V4L2VideoEncodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  if (image_processor_.get())
    image_processor_.release()->Destroy();

  // If the encoder thread is running, destroy using posted task.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // DestroyTask() will put the encoder into kError state and cause all tasks
    // to no-op.
    encoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  encoder_state_ = kError;

  delete this;
}

media::VideoEncodeAccelerator::SupportedProfiles
V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
  SupportedProfiles profiles;
  SupportedProfile profile;
  profile.max_framerate_numerator = 30;
  profile.max_framerate_denominator = 1;

  gfx::Size min_resolution;
  v4l2_fmtdesc fmtdesc;
  memset(&fmtdesc, 0, sizeof(fmtdesc));
  fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) {
    device_->GetSupportedResolution(fmtdesc.pixelformat,
                                    &min_resolution, &profile.max_resolution);
    switch (fmtdesc.pixelformat) {
      case V4L2_PIX_FMT_H264:
        profile.profile = media::H264PROFILE_MAIN;
        profiles.push_back(profile);
        break;
      case V4L2_PIX_FMT_VP8:
        profile.profile = media::VP8PROFILE_ANY;
        profiles.push_back(profile);
        break;
      case V4L2_PIX_FMT_VP9:
        profile.profile = media::VP9PROFILE_ANY;
        profiles.push_back(profile);
        break;
    }
  }

  return profiles;
}

void V4L2VideoEncodeAccelerator::FrameProcessed(
    bool force_keyframe,
    const scoped_refptr<media::VideoFrame>& frame) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe;

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}

void V4L2VideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "EncodeTask(): early out: kError state";
    return;
  }

  encoder_input_queue_.push_back(frame);
  Enqueue();

  if (force_keyframe) {
    // TODO(posciak): this presently makes for slightly imprecise encoding
    // parameter updates.  To precisely align the parameter updates with the
    // incoming input frame, we should queue the parameters together with the
    // frame onto encoder_input_queue_ and apply them when the input is about
    // to be queued to the codec.
    std::vector<struct v4l2_ext_control> ctrls;
    struct v4l2_ext_control ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE;
    ctrl.value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME;
    ctrls.push_back(ctrl);
    if (!SetExtCtrls(ctrls)) {
      LOG(ERROR) << "Failed requesting keyframe";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  encoder_output_queue_.push_back(
      linked_ptr<BitstreamBufferRef>(buffer_ref.release()));
  Enqueue();

  if (encoder_state_ == kInitialized) {
    // Finish setting up our OUTPUT queue.  See: Initialize().
    // VIDIOC_REQBUFS on OUTPUT queue.
    if (!CreateInputBuffers())
      return;
    if (!StartDevicePoll())
      return;
    encoder_state_ = kEncoding;
  }
}

void V4L2VideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";

  // DestroyTask() should run regardless of encoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  // Set our state to kError, and early-out all tasks.
  encoder_state_ = kError;
}

void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);
  DCHECK_NE(encoder_state_, kInitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt())
    return;

  // Device can be polled as soon as either input or output buffers are queued.
  bool poll_device =
      (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
  //   in which case we're in kError state, and we should have early-outed
  //   already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 poll_device));

  DVLOG(2) << __func__ << ": buffer counts: ENC["
           << encoder_input_queue_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "+"
           << input_buffer_queued_count_ << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "+"
           << output_buffer_queued_count_ << "/"
           << output_buffer_map_.size() << "] => OUT["
           << encoder_output_queue_.size() << "]";
}
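
// A note on V4L2 queue naming in Enqueue()/Dequeue() below: the OUTPUT
// (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) queue carries raw input frames into the
// encoder, while the CAPTURE (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) queue
// returns the encoded bitstream.
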
void V4L2VideoEncodeAccelerator::Enqueue() {
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  DVLOG(3) << "Enqueue() "
           << "free_input_buffers: " << free_input_buffers_.size()
           << " input_queue: " << encoder_input_queue_.size();

  // Enqueue all the inputs we can.
  const int old_inputs_queued = input_buffer_queued_count_;
  // while (!ready_input_buffers_.empty()) {
  while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) {
    if (!EnqueueInputRecord())
      return;
  }
  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      input_streamon_ = true;
    }
  }

  // Enqueue all the outputs we can.
  const int old_outputs_queued = output_buffer_queued_count_;
  while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      output_streamon_ = true;
    }
  }
}

void V4L2VideoEncodeAccelerator::Dequeue() {
  DVLOG(3) << "Dequeue()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  while (input_buffer_queued_count_ > 0) {
    DVLOG(4) << "inputs queued: " << input_buffer_queued_count_;
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(&planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = input_memory_type_;
    dqbuf.m.planes = planes;
    dqbuf.length = input_planes_count_;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    InputRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;

    input_record.frame = NULL;
    free_input_buffers_.push_back(dqbuf.index);
    input_buffer_queued_count_--;
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
  // free list.  Notify the client that an output buffer is complete.
  while (output_buffer_queued_count_ > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(output_record.buffer_ref.get());

    void* output_data = output_record.address;
    size_t output_size = dqbuf.m.planes[0].bytesused;
    // This shouldn't happen, but just in case.  We should be able to recover
    // after the next keyframe, after showing some corruption.
    DCHECK_LE(output_size, output_buffer_byte_size_);
    if (output_size > output_buffer_byte_size_)
      output_size = output_buffer_byte_size_;
    uint8* target_data =
        reinterpret_cast<uint8*>(output_record.buffer_ref->shm->memory());
    if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
      if (stream_header_size_ == 0) {
        // Assume that the first buffer dequeued is the stream header.
        stream_header_size_ = output_size;
        stream_header_.reset(new uint8[stream_header_size_]);
        memcpy(stream_header_.get(), output_data, stream_header_size_);
      }
      if (key_frame &&
          output_buffer_byte_size_ - stream_header_size_ >= output_size) {
        // Insert stream header before every keyframe.
        memcpy(target_data, stream_header_.get(), stream_header_size_);
        memcpy(target_data + stream_header_size_, output_data, output_size);
        output_size += stream_header_size_;
      } else {
        memcpy(target_data, output_data, output_size);
      }
    } else {
      memcpy(target_data, output_data, output_size);
    }

    DVLOG(3) << "Dequeue(): returning "
                "bitstream_buffer_id=" << output_record.buffer_ref->id
             << ", size=" << output_size << ", key_frame=" << key_frame;
    child_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&Client::BitstreamBufferReady, client_,
                   output_record.buffer_ref->id, output_size, key_frame));
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(dqbuf.index);
    output_buffer_queued_count_--;
  }
}

bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
  DVLOG(3) << "EnqueueInputRecord()";
  DCHECK(!free_input_buffers_.empty());
  DCHECK(!encoder_input_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front();
  const int index = free_input_buffers_.back();
  InputRecord& input_record = input_buffer_map_[index];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.m.planes = qbuf_planes;

  DCHECK_EQ(device_input_format_, frame->format());
  for (size_t i = 0; i < input_planes_count_; ++i) {
    qbuf.m.planes[i].bytesused =
        base::checked_cast<__u32>(media::VideoFrame::PlaneAllocationSize(
            frame->format(), i, input_allocated_size_));

    switch (input_memory_type_) {
      case V4L2_MEMORY_USERPTR:
        qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
        qbuf.m.planes[i].m.userptr =
            reinterpret_cast<unsigned long>(frame->data(i));
        DCHECK(qbuf.m.planes[i].m.userptr);
        break;

      case V4L2_MEMORY_DMABUF:
        qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i);
        DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
        break;

      default:
        NOTREACHED();
        return false;
    }
  }

  qbuf.memory = input_memory_type_;
  qbuf.length = input_planes_count_;

  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_record.at_device = true;
  input_record.frame = frame;
  encoder_input_queue_.pop_front();
  free_input_buffers_.pop_back();
  input_buffer_queued_count_++;
  return true;
}

bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
  DVLOG(3) << "EnqueueOutputRecord()";
  DCHECK(!free_output_buffers_.empty());
  DCHECK(!encoder_output_queue_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back();
  const int index = free_output_buffers_.back();
  OutputRecord& output_record = output_buffer_map_[index];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.buffer_ref.get());
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  output_record.at_device = true;
  output_record.buffer_ref = output_buffer;
  encoder_output_queue_.pop_back();
  free_output_buffers_.pop_back();
  output_buffer_queued_count_++;
  return true;
}

bool V4L2VideoEncodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!device_poll_thread_.IsRunning());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(kPlatformFailureError);
    return false;
  }
  // Enqueue a poll task with no devices to poll on -- it will wait only on the
  // interrupt fd.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 false));

  return true;
}

bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt())
    return false;
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt())
    return false;

  if (input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  input_streamon_ = false;

  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  // Reset all our accounting info.
  encoder_input_queue_.clear();
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    InputRecord& input_record = input_buffer_map_[i];
    input_record.at_device = false;
    input_record.frame = NULL;
    free_input_buffers_.push_back(i);
  }
  input_buffer_queued_count_ = 0;

  free_output_buffers_.clear();
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(i);
  }
  output_buffer_queued_count_ = 0;

  encoder_output_queue_.clear();

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}

void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) {
  DVLOG(3) << "DevicePollTask()";
  DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());

  bool event_pending;
  if (!device_->Poll(poll_device, &event_pending)) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch encoder state from this thread.
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask,
                 base::Unretained(this)));
}

void V4L2VideoEncodeAccelerator::NotifyError(Error error) {
  DVLOG(1) << "NotifyError(): error=" << error;

  if (!child_task_runner_->BelongsToCurrentThread()) {
    child_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::NotifyError,
                              weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_.reset();
  }
}

void V4L2VideoEncodeAccelerator::SetErrorState(Error error) {
  // We can touch encoder_state_ only if this is the encoder thread or the
  // encoder thread isn't running.
  if (encoder_thread_.message_loop() != NULL &&
      encoder_thread_.message_loop() != base::MessageLoop::current()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::SetErrorState,
                              base::Unretained(this), error));
    return;
  }

  // Post NotifyError only if we are already initialized, as the API does
  // not allow doing so before that.
  if (encoder_state_ != kError && encoder_state_ != kUninitialized)
    NotifyError(error);

  encoder_state_ = kError;
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  if (bitrate < 1)
    bitrate = 1;
  if (framerate < 1)
    framerate = 1;

  std::vector<struct v4l2_ext_control> ctrls;
  struct v4l2_ext_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
  ctrl.value = bitrate;
  ctrls.push_back(ctrl);
  if (!SetExtCtrls(ctrls)) {
    LOG(ERROR) << "Failed changing bitrate";
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  struct v4l2_streamparm parms;
  memset(&parms, 0, sizeof(parms));
  parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Note that we are provided "frames per second" but V4L2 expects "time per
  // frame"; hence we provide the reciprocal of the framerate here.
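  // For example, a requested framerate of 30 fps becomes a time-per-frame of
  // 1/30 s: numerator = 1, denominator = 30.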
  parms.parm.output.timeperframe.numerator = 1;
  parms.parm.output.timeperframe.denominator = framerate;
  IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms);
}

bool V4L2VideoEncodeAccelerator::SetOutputFormat(
    media::VideoCodecProfile output_profile) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  output_format_fourcc_ =
      V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile, false);
  if (!output_format_fourcc_) {
    LOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile;
    return false;
  }

  output_buffer_byte_size_ = kOutputBufferSize;

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = output_format_fourcc_;
  format.fmt.pix_mp.plane_fmt[0].sizeimage =
      base::checked_cast<__u32>(output_buffer_byte_size_);
  format.fmt.pix_mp.num_planes = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  // Device might have adjusted the required output size.
  size_t adjusted_output_buffer_size =
      base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage);
  output_buffer_byte_size_ = adjusted_output_buffer_size;

  return true;
}

bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
    media::VideoFrame::Format input_format) {
  DVLOG(3) << "NegotiateInputFormat()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  device_input_format_ = media::VideoFrame::UNKNOWN;
  input_planes_count_ = 0;

  uint32 input_format_fourcc =
      V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format);
  if (!input_format_fourcc) {
    LOG(ERROR) << "Unsupported input format";
    return false;
  }

  size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
  DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

  // First see if the device can use the provided input_format directly.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = input_format_fourcc;
  format.fmt.pix_mp.num_planes = input_planes_count;
  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    // Error or format unsupported by device, try to negotiate a fallback.
    input_format_fourcc = device_->PreferredInputFormat();
    input_format =
        V4L2Device::V4L2PixFmtToVideoFrameFormat(input_format_fourcc);
    if (input_format == media::VideoFrame::UNKNOWN)
      return false;

    input_planes_count = media::VideoFrame::NumPlanes(input_format);
    DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

    // Device might have adjusted parameters, reset them along with the format.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.width = visible_size_.width();
    format.fmt.pix_mp.height = visible_size_.height();
    format.fmt.pix_mp.pixelformat = input_format_fourcc;
    format.fmt.pix_mp.num_planes = input_planes_count;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
  }

  // Take device-adjusted sizes for allocated size.
  input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
  DCHECK(gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_)));

  device_input_format_ = input_format;
  input_planes_count_ = input_planes_count;
  return true;
}

bool V4L2VideoEncodeAccelerator::SetFormats(
    media::VideoFrame::Format input_format,
    media::VideoCodecProfile output_profile) {
  DVLOG(3) << "SetFormats()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  if (!SetOutputFormat(output_profile))
    return false;

  if (!NegotiateInputFormat(input_format))
    return false;

  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = visible_size_.width();
  crop.c.height = visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);

  return true;
}

bool V4L2VideoEncodeAccelerator::SetExtCtrls(
    std::vector<struct v4l2_ext_control> ctrls) {
  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  ext_ctrls.count = ctrls.size();
  ext_ctrls.controls = &ctrls[0];
  return device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) == 0;
}

bool V4L2VideoEncodeAccelerator::InitControls() {
  std::vector<struct v4l2_ext_control> ctrls;
  struct v4l2_ext_control ctrl;

  // Enable frame-level bitrate control.  This is the only mandatory control.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
  ctrl.value = 1;
  ctrls.push_back(ctrl);
  if (!SetExtCtrls(ctrls)) {
    LOG(ERROR) << "Failed enabling bitrate control";
    NOTIFY_ERROR(kPlatformFailureError);
    return false;
  }

  // Optional controls.
  ctrls.clear();
  if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
    // No B-frames, for lowest decoding latency.
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
    ctrl.value = 0;
    ctrls.push_back(ctrl);

    // Quantization parameter maximum value (for variable bitrate control).
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
    ctrl.value = 51;
    ctrls.push_back(ctrl);

    // Use H.264 level 4.0 to match the supported max resolution.
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
    ctrl.value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
    ctrls.push_back(ctrl);

    // Separate stream header so we can cache it and insert into the stream.
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
    ctrl.value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
    ctrls.push_back(ctrl);
  }

  // Enable "tight" bitrate mode.  For this to work properly, frame- and
  // mb-level bitrate controls have to be enabled as well.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
  ctrl.value = 1;
  ctrls.push_back(ctrl);

  // Force bitrate control to average over a GOP (for tight bitrate
  // tolerance).
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT;
  ctrl.value = 1;
  ctrls.push_back(ctrl);

  // Enable macroblock-level bitrate control.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE;
  ctrl.value = 1;
  ctrls.push_back(ctrl);

  // Disable periodic key frames.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
  ctrl.value = 0;
  ctrls.push_back(ctrl);

  // Ignore the return value as these controls are optional.
  SetExtCtrls(ctrls);

  return true;
}

bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << "CreateInputBuffers()";
  // This function runs on encoder_thread_ after output buffers have been
  // provided by the client.
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  // Driver will modify to the appropriate number of buffers.
  reqbufs.count = 1;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // TODO(posciak): Once we start doing zero-copy, we should decide based on
  // the current pipeline setup which memory type to use.  This should probably
  // be decided based on an argument to Initialize().
  if (image_processor_.get())
    input_memory_type_ = V4L2_MEMORY_DMABUF;
  else
    input_memory_type_ = V4L2_MEMORY_USERPTR;

  reqbufs.memory = input_memory_type_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < input_buffer_map_.size(); ++i)
    free_input_buffers_.push_back(i);

  return true;
}

bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = arraysize(planes);
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.m.planes[0].length;
    free_output_buffers_.push_back(i);
  }

  return true;
}

void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
  DVLOG(3) << "DestroyInputBuffers()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = input_memory_type_;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}

void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
  DVLOG(3) << "DestroyOutputBuffers()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    if (output_buffer_map_[i].address != NULL)
      device_->Munmap(output_buffer_map_[i].address,
                      output_buffer_map_[i].length);
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.clear();
  free_output_buffers_.clear();
}

}  // namespace content