// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <dlfcn.h>
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
#include "base/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
#include "media/base/media_switches.h"
#include "media/filters/h264_parser.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gl/scoped_binders.h"

#define NOTIFY_ERROR(x)                         \
  do {                                          \
    LOG(ERROR) << "Setting error state:" << x;  \
    SetErrorState(x);                           \
  } while (0)
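
// Helpers for device ioctl()s: on failure, the IOCTL_OR_ERROR_RETURN*
// variants log the failing request, move the decoder into the error state via
// NOTIFY_ERROR(PLATFORM_FAILURE) and return from the calling function (with
// |value|, nothing, or false, respectively), while IOCTL_OR_LOG_ERROR() only
// logs and continues.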
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str)      \
  do {                                                               \
    if (device_->Ioctl(type, arg) != 0) {                            \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
      NOTIFY_ERROR(PLATFORM_FAILURE);                                \
      return value;                                                  \
    }                                                                \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

#define IOCTL_OR_LOG_ERROR(type, arg)                             \
  do {                                                            \
    if (device_->Ioctl(type, arg) != 0)                           \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
  } while (0)

namespace content {
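
// V4L2 memory-to-memory terminology used throughout this file: the OUTPUT
// queue (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) carries compressed bitstream
// buffers into the decoder, while the CAPTURE queue
// (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) hands decoded frames back to us.

// BitstreamBufferRef owns the shared memory backing one client-provided
// bitstream buffer. Its destructor notifies the client that the buffer has
// been consumed, unless it is an internally generated flush buffer (negative
// input_id).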
struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(
      base::WeakPtr<Client>& client,
      scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
      base::SharedMemory* shm,
      size_t size,
      int32 input_id);
  ~BitstreamBufferRef();
  const base::WeakPtr<Client> client;
  const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
  size_t bytes_used;
  const int32 input_id;
};

struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
  EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
  ~EGLSyncKHRRef();
  EGLDisplay const egl_display;
  EGLSyncKHR egl_sync;
};

struct V4L2VideoDecodeAccelerator::PictureRecord {
  PictureRecord(bool cleared, const media::Picture& picture);
  ~PictureRecord();
  bool cleared;  // Whether the texture is cleared and safe to render from.
  media::Picture picture;  // The decoded picture.
};

V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
    base::WeakPtr<Client>& client,
    scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
    base::SharedMemory* shm,
    size_t size,
    int32 input_id)
    : client(client),
      client_task_runner(client_task_runner),
      shm(shm),
      size(size),
      bytes_used(0),
      input_id(input_id) {
}

V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
  if (input_id >= 0) {
    client_task_runner->PostTask(
        FROM_HERE,
        base::Bind(&Client::NotifyEndOfBitstreamBuffer, client, input_id));
  }
}

V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
    EGLDisplay egl_display, EGLSyncKHR egl_sync)
    : egl_display(egl_display),
      egl_sync(egl_sync) {
}

V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
  // We don't check for eglDestroySyncKHR failures, because if we get here
  // with a valid sync object, something went wrong and we are getting
  // destroyed anyway.
  if (egl_sync != EGL_NO_SYNC_KHR)
    eglDestroySyncKHR(egl_display, egl_sync);
}

V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
    : at_device(false),
      address(NULL),
      length(0),
      bytes_used(0),
      input_id(-1) {
}

V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
}

V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false),
      at_client(false),
      egl_image(EGL_NO_IMAGE_KHR),
      egl_sync(EGL_NO_SYNC_KHR),
      picture_id(-1),
      cleared(false) {
}

V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}

V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
    bool cleared,
    const media::Picture& picture)
    : cleared(cleared), picture(picture) {}

V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}

V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
    EGLDisplay egl_display,
    EGLContext egl_context,
    const base::WeakPtr<Client>& io_client,
    const base::Callback<bool(void)>& make_context_current,
    const scoped_refptr<V4L2Device>& device,
    const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
    : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      io_task_runner_(io_task_runner),
      io_client_(io_client),
      decoder_thread_("V4L2DecoderThread"),
      decoder_state_(kUninitialized),
      device_(device),
      decoder_delay_bitstream_buffer_id_(-1),
      decoder_current_input_buffer_(-1),
      decoder_decode_buffer_tasks_scheduled_(0),
      decoder_frames_at_client_(0),
      decoder_flushing_(false),
      resolution_change_reset_pending_(false),
      decoder_partial_frame_pending_(false),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      output_dpb_size_(0),
      output_planes_count_(0),
      picture_clearing_count_(0),
      pictures_assigned_(false, false),
      device_poll_thread_("V4L2DevicePollThread"),
      make_context_current_(make_context_current),
      egl_display_(egl_display),
      egl_context_(egl_context),
      video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      output_format_fourcc_(0),
      weak_this_factory_(this) {
  weak_this_ = weak_this_factory_.GetWeakPtr();
}

V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
  DCHECK(!decoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());

  DestroyInputBuffers();
  DestroyOutputBuffers();

  // These maps have members that should be manually destroyed, e.g. file
  // descriptors, mmap() segments, etc.
  DCHECK(input_buffer_map_.empty());
  DCHECK(output_buffer_map_.empty());
}

bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  DVLOG(3) << "Initialize()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kUninitialized);

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  switch (profile) {
    case media::H264PROFILE_BASELINE:
      DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
      break;
    case media::H264PROFILE_MAIN:
      DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
      break;
    case media::H264PROFILE_HIGH:
      DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
      break;
    case media::VP8PROFILE_ANY:
      DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
      break;
    case media::VP9PROFILE_ANY:
      DVLOG(2) << "Initialize(): profile VP9PROFILE_ANY";
      break;
    default:
      DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
      return false;
  }
  video_profile_ = profile;

  if (egl_display_ == EGL_NO_DISPLAY) {
    LOG(ERROR) << "Initialize(): could not get EGLDisplay";
    return false;
  }

  // We need the context to be initialized to query extensions.
  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Initialize(): could not make context current";
    return false;
  }

  // TODO(posciak): crbug.com/450898.
#if defined(ARCH_CPU_ARMEL)
  if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
    LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
    return false;
  }
#endif

  // Capabilities check.
  struct v4l2_capability caps;
  const __u32 kCapsRequired =
      V4L2_CAP_VIDEO_CAPTURE_MPLANE |
      V4L2_CAP_VIDEO_OUTPUT_MPLANE |
      V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
                  ", caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetupFormats())
    return false;

  // Subscribe to the resolution change event.
  struct v4l2_event_subscription sub;
  memset(&sub, 0, sizeof(sub));
  sub.type = V4L2_EVENT_SOURCE_CHANGE;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);

  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new media::H264Parser());
  }

  if (!CreateInputBuffers())
    return false;

  if (!decoder_thread_.Start()) {
    LOG(ERROR) << "Initialize(): decoder thread failed to start";
    return false;
  }

  decoder_state_ = kInitialized;

  // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
  decoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(
          base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
          base::Unretained(this)));

  return true;
}

void V4L2VideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
           << ", size=" << bitstream_buffer.size();
  DCHECK(io_task_runner_->BelongsToCurrentThread());

  // DecodeTask() will take care of running a DecodeBufferTask().
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
      bitstream_buffer));
}

void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  const uint32_t req_buffer_count =
      output_dpb_size_ + kDpbOutputBufferExtraCount;

  if (buffers.size() < req_buffer_count) {
    LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
                  " buffers. (Got " << buffers.size()
               << ", requested " << req_buffer_count << ")";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "AssignPictureBuffers(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);

  // It's safe to manipulate all the buffer state here, because the decoder
  // thread is waiting on pictures_assigned_.

  // Allocate the output buffers.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = buffers.size();
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);

  if (reqbufs.count != buffers.size()) {
    DLOG(ERROR) << "Could not allocate enough output buffers";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  output_buffer_map_.resize(buffers.size());

  DCHECK(free_output_buffers_.empty());
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    DCHECK(buffers[i].size() == coded_size_);

    OutputRecord& output_record = output_buffer_map_[i];
    DCHECK(!output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    DCHECK_EQ(output_record.picture_id, -1);
    DCHECK_EQ(output_record.cleared, false);

    EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
                                                    egl_context_,
                                                    buffers[i].texture_id(),
                                                    coded_size_,
                                                    i,
                                                    output_format_fourcc_,
                                                    output_planes_count_);
    if (egl_image == EGL_NO_IMAGE_KHR) {
      LOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
      // Ownership of EGLImages allocated in previous iterations of this loop
      // has been transferred to output_buffer_map_. After we error-out here
      // the destructor will handle their cleanup.
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }

    output_record.egl_image = egl_image;
    output_record.picture_id = buffers[i].id();
    free_output_buffers_.push(i);
    DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
             << "]: picture_id=" << output_record.picture_id;
  }

  pictures_assigned_.Signal();
}

void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
  DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
  // Must be run on child thread, as we'll insert a sync in the EGL context.
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "ReusePictureBuffer(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
  // TODO(posciak): crbug.com/450898.
#if defined(ARCH_CPU_ARMEL)
  egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
  if (egl_sync == EGL_NO_SYNC_KHR) {
    LOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
#endif

  scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
      egl_display_, egl_sync));
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
      base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
}

void V4L2VideoDecodeAccelerator::Flush() {
  DVLOG(3) << "Flush()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
}

void V4L2VideoDecodeAccelerator::Reset() {
  DVLOG(3) << "Reset()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
}

void V4L2VideoDecodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();
  weak_this_factory_.InvalidateWeakPtrs();

  // If the decoder thread is running, destroy using posted task.
  if (decoder_thread_.IsRunning()) {
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
    pictures_assigned_.Signal();
    // DestroyTask() will cause the decoder_thread_ to flush all tasks.
    decoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  delete this;
}

bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }

// static
media::VideoDecodeAccelerator::SupportedProfiles
V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
  scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
  if (!device)
    return SupportedProfiles();

  const uint32_t supported_formats[] = {
      V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9};
  return device->GetSupportedDecodeProfiles(arraysize(supported_formats),
                                            supported_formats);
}

void V4L2VideoDecodeAccelerator::DecodeTask(
    const media::BitstreamBuffer& bitstream_buffer) {
  DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
               bitstream_buffer.id());

  scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
      io_client_, io_task_runner_,
      new base::SharedMemory(bitstream_buffer.handle(), true),
      bitstream_buffer.size(), bitstream_buffer.id()));
  if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
    LOG(ERROR) << "Decode(): could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
  DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();

  if (decoder_state_ == kResetting || decoder_flushing_) {
    // In the case that we're resetting or flushing, we need to delay decoding
    // the BitstreamBuffers that come after the Reset() or Flush() call. When
    // we're here, we know that this DecodeTask() was scheduled by a Decode()
    // call that came after (in the client thread) the Reset() or Flush() call;
    // thus set up the delay if necessary.
    if (decoder_delay_bitstream_buffer_id_ == -1)
      decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeTask(): early out: kError state";
    return;
  }

  decoder_input_queue_.push(
      linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
  decoder_decode_buffer_tasks_scheduled_++;
  DecodeBufferTask();
}

void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
  DVLOG(3) << "DecodeBufferTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");

  decoder_decode_buffer_tasks_scheduled_--;

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeBufferTask(): early out: kError state";
    return;
  } else if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
    return;
  }

  if (decoder_current_bitstream_buffer_ == NULL) {
    if (decoder_input_queue_.empty()) {
      // We're waiting for a new buffer -- exit without scheduling a new task.
      return;
    }
    linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
    if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
      // We're asked to delay decoding on this and subsequent buffers.
      return;
    }

    // Setup to use the next buffer.
    decoder_current_bitstream_buffer_.reset(buffer_ref.release());
    decoder_input_queue_.pop();
    DVLOG(3) << "DecodeBufferTask(): reading input_id="
             << decoder_current_bitstream_buffer_->input_id
             << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
                              decoder_current_bitstream_buffer_->shm->memory() :
                              NULL)
             << ", size=" << decoder_current_bitstream_buffer_->size;
  }
  bool schedule_task = false;
  const size_t size = decoder_current_bitstream_buffer_->size;
  size_t decoded_size = 0;
  if (size == 0) {
    const int32 input_id = decoder_current_bitstream_buffer_->input_id;
    if (input_id >= 0) {
      // This is a buffer queued from the client that has zero size. Skip.
      schedule_task = true;
    } else {
      // This is a buffer of zero size, queued to flush the pipe. Flush.
      DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
                static_cast<base::SharedMemory*>(NULL));
      // Enqueue a buffer guaranteed to be empty. To do that, we flush the
      // current input, enqueue no data to the next frame, then flush that down.
      schedule_task = true;
      if (decoder_current_input_buffer_ != -1 &&
          input_buffer_map_[decoder_current_input_buffer_].input_id !=
              kFlushBufferId)
        schedule_task = FlushInputFrame();

      if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
        DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
        decoder_partial_frame_pending_ = false;
        schedule_task = true;
      } else {
        // If we failed to enqueue the empty buffer (due to pipeline
        // backpressure), don't advance the bitstream buffer queue, and don't
        // schedule the next task. This bitstream buffer queue entry will get
        // reprocessed when the pipeline frees up.
        schedule_task = false;
      }
    }
  } else {
    // This is a buffer queued from the client, with actual contents. Decode.
    const uint8* const data =
        reinterpret_cast<const uint8*>(
            decoder_current_bitstream_buffer_->shm->memory()) +
        decoder_current_bitstream_buffer_->bytes_used;
    const size_t data_size =
        decoder_current_bitstream_buffer_->size -
        decoder_current_bitstream_buffer_->bytes_used;
    if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
      NOTIFY_ERROR(UNREADABLE_INPUT);
      return;
    }
    // AdvanceFrameFragment should not return a size larger than the buffer
    // size, even on invalid data.
    CHECK_LE(decoded_size, data_size);

    switch (decoder_state_) {
      case kInitialized:
      case kAfterReset:
        schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
        break;
      case kDecoding:
        schedule_task = DecodeBufferContinue(data, decoded_size);
        break;
      default:
        NOTIFY_ERROR(ILLEGAL_STATE);
        return;
    }
  }
  if (decoder_state_ == kError) {
    // Failed during decode.
    return;
  }

  if (schedule_task) {
    decoder_current_bitstream_buffer_->bytes_used += decoded_size;
    if (decoder_current_bitstream_buffer_->bytes_used ==
        decoder_current_bitstream_buffer_->size) {
      // Our current bitstream buffer is done; return it.
      int32 input_id = decoder_current_bitstream_buffer_->input_id;
      DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
      // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
      decoder_current_bitstream_buffer_.reset();
    }
    ScheduleDecodeBufferTaskIfNeeded();
  }
}
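
// Scans the H.264 stream in |data| for the next frame boundary (or, for
// VP8/VP9, accepts the whole buffer), storing in |*endpos| how many bytes
// belong to the current frame. Returns false on a malformed stream. Sets
// |decoder_partial_frame_pending_| when the buffer ends before a boundary is
// found.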
bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
    const uint8* data,
    size_t size,
    size_t* endpos) {
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    // For H264, we need to feed HW one frame at a time. This is going to take
    // some parsing of our input stream.
    decoder_h264_parser_->SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result;
    *endpos = 0;

    // Keep on peeking the next NALs while they don't indicate a frame
    // boundary.
    for (;;) {
      bool end_of_frame = false;
      result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
      if (result == media::H264Parser::kInvalidStream ||
          result == media::H264Parser::kUnsupportedStream)
        return false;
      if (result == media::H264Parser::kEOStream) {
        // We've reached the end of the buffer before finding a frame boundary.
        decoder_partial_frame_pending_ = true;
        return true;
      }
      switch (nalu.nal_unit_type) {
        case media::H264NALU::kNonIDRSlice:
        case media::H264NALU::kIDRSlice:
          if (nalu.size < 1)
            return false;
          // For these two, if the "first_mb_in_slice" field is zero, start a
          // new frame and return. This field is Exp-Golomb coded starting on
          // the eighth data bit of the NAL; a zero value is encoded with a
          // leading '1' bit in the byte, which we can detect as the byte being
          // (unsigned) greater than or equal to 0x80.
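          // For example, a first payload byte of 0x88 encodes
          // first_mb_in_slice == 0 (a new frame), while 0x40 encodes a
          // nonzero value (continuation of the current frame).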
          if (nalu.data[1] >= 0x80) {
            end_of_frame = true;
            break;
          }
          break;
        case media::H264NALU::kSEIMessage:
        case media::H264NALU::kSPS:
        case media::H264NALU::kPPS:
        case media::H264NALU::kAUD:
        case media::H264NALU::kEOSeq:
        case media::H264NALU::kEOStream:
        case media::H264NALU::kReserved14:
        case media::H264NALU::kReserved15:
        case media::H264NALU::kReserved16:
        case media::H264NALU::kReserved17:
        case media::H264NALU::kReserved18:
          // These unconditionally signal a frame boundary.
          end_of_frame = true;
          break;
        default:
          // For all others, keep going.
          break;
      }
      if (end_of_frame) {
        if (!decoder_partial_frame_pending_ && *endpos == 0) {
          // The frame was previously restarted, and we haven't filled the
          // current frame with any contents yet. Start the new frame here and
          // continue parsing NALs.
        } else {
          // The frame wasn't previously restarted and/or we have contents for
          // the current frame; signal the start of a new frame here: we don't
          // have a partial frame anymore.
          decoder_partial_frame_pending_ = false;
          return true;
        }
      }
      *endpos = (nalu.data + nalu.size) - data;
    }
    NOTREACHED();
    return false;
  } else {
    DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
    DCHECK_LE(video_profile_, media::VP9PROFILE_MAX);
    // For VP8/9, we can just dump the entire buffer. No fragmentation needed,
    // and we never return a partial frame.
    *endpos = size;
    decoder_partial_frame_pending_ = false;
    return true;
  }
}

void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // If we're behind on tasks, schedule another one.
  int buffers_to_decode = decoder_input_queue_.size();
  if (decoder_current_bitstream_buffer_ != NULL)
    buffers_to_decode++;
  if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
    decoder_decode_buffer_tasks_scheduled_++;
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &V4L2VideoDecodeAccelerator::DecodeBufferTask,
        base::Unretained(this)));
  }
}

bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
    const void* data, size_t size, size_t* endpos) {
  DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kDecoding);
  // Initial decode. We haven't been able to get output stream format info yet.
  // Get it, and start decoding.

  // Copy in and send to HW.
  if (!AppendToInputFrame(data, size))
    return false;

  // If we only have a partial frame, don't flush and process yet.
  if (decoder_partial_frame_pending_)
    return true;

  if (!FlushInputFrame())
    return false;

  // Recycle buffers.
  Dequeue();

  // Check and see if we have format info yet.
  struct v4l2_format format;
  gfx::Size visible_size;
  bool again = false;
  if (!GetFormatInfo(&format, &visible_size, &again))
    return false;

  *endpos = size;

  if (again) {
    // Need more stream to decode format, return true and schedule next buffer.
    return true;
  }

  // Run this initialization only on first startup.
  if (decoder_state_ == kInitialized) {
    DVLOG(3) << "DecodeBufferInitial(): running initialization";
    // Success! Setup our parameters.
    if (!CreateBuffersForFormat(format, visible_size))
      return false;
  }

  decoder_state_ = kDecoding;
  ScheduleDecodeBufferTaskIfNeeded();
  return true;
}

bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
    const void* data, size_t size) {
  DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_EQ(decoder_state_, kDecoding);

  // Both of these calls will set kError state if they fail.
  // Only flush the frame if it's complete.
  return (AppendToInputFrame(data, size) &&
          (decoder_partial_frame_pending_ || FlushInputFrame()));
}

bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
    const void* data, size_t size) {
  DVLOG(3) << "AppendToInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);
  // This routine can handle data == NULL and size == 0, which occurs when
  // we queue an empty buffer for the purposes of flushing the pipe.

  // Flush if we're too big
  if (decoder_current_input_buffer_ != -1) {
    InputRecord& input_record =
        input_buffer_map_[decoder_current_input_buffer_];
    if (input_record.bytes_used + size > input_record.length) {
      if (!FlushInputFrame())
        return false;
      decoder_current_input_buffer_ = -1;
    }
  }

  // Try to get an available input buffer
  if (decoder_current_input_buffer_ == -1) {
    if (free_input_buffers_.empty()) {
      // See if we can get more free buffers from HW
      Dequeue();
      if (free_input_buffers_.empty()) {
        // Nope!
        DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
        return false;
      }
    }
    decoder_current_input_buffer_ = free_input_buffers_.back();
    free_input_buffers_.pop_back();
    InputRecord& input_record =
        input_buffer_map_[decoder_current_input_buffer_];
    DCHECK_EQ(input_record.bytes_used, 0);
    DCHECK_EQ(input_record.input_id, -1);
    DCHECK(decoder_current_bitstream_buffer_ != NULL);
    input_record.input_id = decoder_current_bitstream_buffer_->input_id;
  }

  DCHECK(data != NULL || size == 0);
  if (size == 0) {
    // If we asked for an empty buffer, return now. We return only after
    // getting the next input buffer, since we might actually want an empty
    // input buffer for flushing purposes.
    return true;
  }

  // Copy in to the buffer.
  InputRecord& input_record =
      input_buffer_map_[decoder_current_input_buffer_];
  if (size > input_record.length - input_record.bytes_used) {
    LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return false;
  }
  memcpy(
      reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
      data,
      size);
  input_record.bytes_used += size;

  return true;
}

bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
  DVLOG(3) << "FlushInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);

  if (decoder_current_input_buffer_ == -1)
    return true;

  InputRecord& input_record =
      input_buffer_map_[decoder_current_input_buffer_];
  DCHECK_NE(input_record.input_id, -1);
  DCHECK(input_record.input_id != kFlushBufferId ||
         input_record.bytes_used == 0);
  // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
  //   got from the client. We can skip it if it is empty.
  // * if input_id < 0 (should be kFlushBufferId in this case), this input
  //   buffer was prompted by a flush buffer, and should be queued even when
  //   empty.
  if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
    input_record.input_id = -1;
    free_input_buffers_.push_back(decoder_current_input_buffer_);
    decoder_current_input_buffer_ = -1;
    return true;
  }

  // Queue it.
  input_ready_queue_.push(decoder_current_input_buffer_);
  decoder_current_input_buffer_ = -1;
  DVLOG(3) << "FlushInputFrame(): submitting input_id="
           << input_record.input_id;
  // Enqueue once since there's new available input for it.
  Enqueue();

  return (decoder_state_ != kError);
}
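
// ServiceDeviceTask() is scheduled (on the decoder thread) by DevicePollTask()
// whenever the device or the poll interrupt fd becomes ready. |event_pending|
// is true if a V4L2 event (e.g. a source/resolution change) may be queued on
// the device and should be dequeued here.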
void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  } else if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
    return;
  }

  bool resolution_change_pending = false;
  if (event_pending)
    resolution_change_pending = DequeueResolutionChangeEvent();
  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  bool poll_device = false;
  // Add fd, if we should poll on it.
  // Can be polled as soon as either input or output buffers are queued.
  if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
    poll_device = true;

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
  //   shut it down, in which case we're either in kResetting or kError states
  //   respectively, and we should have early-outed already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 poll_device));
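
  // Log pipeline occupancy: DEC[bitstream buffers awaiting decode -> input
  // buffers ready to queue] => DEVICE[free + queued / total input buffers ->
  // free + queued / total output buffers] => VDA[decoded frames held by the
  // client].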
  DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
           << decoder_input_queue_.size() << "->"
           << input_ready_queue_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "+"
           << input_buffer_queued_count_ << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "+"
           << output_buffer_queued_count_ << "/"
           << output_buffer_map_.size() << "] => VDA["
           << decoder_frames_at_client_ << "]";

  ScheduleDecodeBufferTaskIfNeeded();
  if (resolution_change_pending)
    StartResolutionChange();
}

void V4L2VideoDecodeAccelerator::Enqueue() {
  DVLOG(3) << "Enqueue()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");

  // Drain the pipe of completed decode buffers.
  const int old_inputs_queued = input_buffer_queued_count_;
  while (!input_ready_queue_.empty()) {
    if (!EnqueueInputRecord())
      return;
  }
  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt()) {
      PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      input_streamon_ = true;
    }
  }

  // Enqueue all the outputs we can.
  const int old_outputs_queued = output_buffer_queued_count_;
  while (!free_output_buffers_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt()) {
      PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      output_streamon_ = true;
    }
  }
}

bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DVLOG(3) << "DequeueResolutionChangeEvent()";

  struct v4l2_event ev;
  memset(&ev, 0, sizeof(ev));

  while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
    if (ev.type == V4L2_EVENT_SOURCE_CHANGE) {
      uint32_t changes = ev.u.src_change.changes;
      // We used to treat every source change event as a resolution change, and
      // the union |ev.u| was left unused (and therefore zero). With the
      // upstream version of the source change event we also have to inspect
      // |ev.u.src_change.changes| to learn what actually changed. For backward
      // compatibility, the event is treated as a resolution change when all
      // bits in |ev.u.src_change.changes| are cleared.
      if (changes == 0 || (changes & V4L2_EVENT_SRC_CH_RESOLUTION)) {
        DVLOG(3)
            << "DequeueResolutionChangeEvent(): got resolution change event.";
        return true;
      }
    } else {
      LOG(ERROR) << "DequeueResolutionChangeEvent(): got an event (" << ev.type
                 << ") we haven't subscribed to.";
    }
  }
  return false;
}

void V4L2VideoDecodeAccelerator::Dequeue() {
  DVLOG(3) << "Dequeue()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");

  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  while (input_buffer_queued_count_ > 0) {
    DCHECK(input_streamon_);
    struct v4l2_buffer dqbuf;
    struct v4l2_plane planes[1];
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    InputRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    free_input_buffers_.push_back(dqbuf.index);
    input_record.at_device = false;
    input_record.bytes_used = 0;
    input_record.input_id = -1;
    input_buffer_queued_count_--;
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
  // completed queue.
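  // The driver echoes back the timestamp we set on the corresponding input
  // (VIDEO_OUTPUT) buffer, which is how we recover the bitstream input_id for
  // the decoded picture below.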
  while (output_buffer_queued_count_ > 0) {
    DCHECK(output_streamon_);
    struct v4l2_buffer dqbuf;
    scoped_ptr<struct v4l2_plane[]> planes(
        new v4l2_plane[output_planes_count_]);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes.get();
    dqbuf.length = output_planes_count_;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
    DCHECK_NE(output_record.picture_id, -1);
    output_record.at_device = false;
    if (dqbuf.m.planes[0].bytesused == 0) {
      // This is an empty output buffer returned as part of a flush.
      free_output_buffers_.push(dqbuf.index);
    } else {
      DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
      output_record.at_client = true;
      DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
               << " as picture_id=" << output_record.picture_id;
      const media::Picture& picture =
          media::Picture(output_record.picture_id, dqbuf.timestamp.tv_sec,
                         gfx::Rect(visible_size_), false);
      pending_picture_ready_.push(
          PictureRecord(output_record.cleared, picture));
      SendPictureReady();
      output_record.cleared = true;
      decoder_frames_at_client_++;
    }
    output_buffer_queued_count_--;
  }

  NotifyFlushDoneIfNeeded();
}

bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
  DVLOG(3) << "EnqueueInputRecord()";
  DCHECK(!input_ready_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  const int buffer = input_ready_queue_.front();
  InputRecord& input_record = input_buffer_map_[buffer];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
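  // Stash the bitstream id in the buffer's timestamp; the driver copies it to
  // the corresponding decoded (CAPTURE) buffer, and Dequeue() reads it back to
  // tie the output picture to its input buffer.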
  qbuf.timestamp.tv_sec = input_record.input_id;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].bytesused = input_record.bytes_used;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_ready_queue_.pop();
  input_record.at_device = true;
  input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
           << input_record.input_id << " size=" << input_record.bytes_used;
  return true;
}

bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
  DVLOG(3) << "EnqueueOutputRecord()";
  DCHECK(!free_output_buffers_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  const int buffer = free_output_buffers_.front();
  OutputRecord& output_record = output_buffer_map_[buffer];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.at_client);
  DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
  DCHECK_NE(output_record.picture_id, -1);
  if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
    TRACE_EVENT0("Video Decoder",
                 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
    // If we have to wait for completion, wait. Note that
    // free_output_buffers_ is a FIFO queue, so we always wait on the
    // buffer that has been in the queue the longest.
    if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
                             EGL_FOREVER_KHR) == EGL_FALSE) {
      // This will cause tearing, but is safe otherwise.
      DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
    }
    if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
      LOG(ERROR) << __func__ << " eglDestroySyncKHR failed!";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
    output_record.egl_sync = EGL_NO_SYNC_KHR;
  }
  struct v4l2_buffer qbuf;
  scoped_ptr<struct v4l2_plane[]> qbuf_planes(
      new v4l2_plane[output_planes_count_]);
  memset(&qbuf, 0, sizeof(qbuf));
  memset(
      qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes.get();
  qbuf.length = output_planes_count_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  free_output_buffers_.pop();
  output_record.at_device = true;
  output_buffer_queued_count_++;
  return true;
}

void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
    int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
  DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
           << picture_buffer_id;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");

  // We run ReusePictureBufferTask even if we're in kResetting.
  if (decoder_state_ == kError) {
    DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
    return;
  }

  if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
    return;
  }

  size_t index;
  for (index = 0; index < output_buffer_map_.size(); ++index)
    if (output_buffer_map_[index].picture_id == picture_buffer_id)
      break;

  if (index >= output_buffer_map_.size()) {
    // It's possible that we've already posted a DismissPictureBuffer for this
    // picture, but it has not yet executed when this ReusePictureBuffer was
    // posted to us by the client. In that case just ignore this (we've already
    // dismissed it and accounted for that) and let the sync object get
    // destroyed.
    DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
             << picture_buffer_id << " not in use (anymore?).";
    return;
  }

  OutputRecord& output_record = output_buffer_map_[index];
  if (output_record.at_device || !output_record.at_client) {
    LOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
  DCHECK(!output_record.at_device);
  output_record.at_client = false;
  output_record.egl_sync = egl_sync_ref->egl_sync;
  free_output_buffers_.push(index);
  decoder_frames_at_client_--;
  // Take ownership of the EGLSync.
  egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
  // We got a buffer back, so enqueue it back.
  Enqueue();
}

void V4L2VideoDecodeAccelerator::FlushTask() {
  DVLOG(3) << "FlushTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");

  // Flush outstanding buffers.
  if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
    // There's nothing in the pipe, so return done immediately.
    DVLOG(3) << "FlushTask(): returning flush";
    child_task_runner_->PostTask(FROM_HERE,
                                 base::Bind(&Client::NotifyFlushDone, client_));
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "FlushTask(): early out: kError state";
    return;
  }

  // We don't support stacked flushing.
  DCHECK(!decoder_flushing_);

  // Queue up an empty buffer -- this triggers the flush.
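  // The flush marker uses kFlushBufferId (a negative id) and no backing shared
  // memory, so DecodeBufferTask() recognizes it and pushes an empty input
  // buffer through the device, and ~BitstreamBufferRef() skips the
  // NotifyEndOfBitstreamBuffer() callback for it.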
  decoder_input_queue_.push(
      linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
          io_client_, io_task_runner_, NULL, 0, kFlushBufferId)));
  decoder_flushing_ = true;
  SendPictureReady();  // Send all pending PictureReady.

  ScheduleDecodeBufferTaskIfNeeded();
}

void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
  if (!decoder_flushing_)
    return;

  // Pipeline is empty when:
  // * Decoder input queue is empty of non-delayed buffers.
  // * There is no currently filling input buffer.
  // * Input holding queue is empty.
  // * All input (VIDEO_OUTPUT) buffers are returned.
  if (!decoder_input_queue_.empty()) {
    if (decoder_input_queue_.front()->input_id !=
        decoder_delay_bitstream_buffer_id_)
      return;
  }
  if (decoder_current_input_buffer_ != -1)
    return;
  if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
    return;

  // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
  // sequence after flush to continue, even if we are not resetting. This would
  // make sense, because we don't really want to resume from a non-resume point
  // (e.g. not from an IDR) if we are flushed.
  // MSE player however triggers a Flush() on chunk end, but never Reset(). One
  // could argue either way, or even say that Flush() is not needed/harmful when
  // transitioning to next chunk.
  // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
  // when doing MSE. This should be harmless otherwise.
  if (!(StopDevicePoll() && StopOutputStream() && StopInputStream()))
    return;

  if (!StartDevicePoll())
    return;

  decoder_delay_bitstream_buffer_id_ = -1;
  decoder_flushing_ = false;
  DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
  child_task_runner_->PostTask(FROM_HERE,
                               base::Bind(&Client::NotifyFlushDone, client_));

  // While we were flushing, we early-outed DecodeBufferTask()s.
  ScheduleDecodeBufferTaskIfNeeded();
}

void V4L2VideoDecodeAccelerator::ResetTask() {
  DVLOG(3) << "ResetTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");

  if (decoder_state_ == kError) {
    DVLOG(2) << "ResetTask(): early out: kError state";
    return;
  }

  // If we are in the middle of switching resolutions, postpone reset until
  // it's done. We don't have to worry about timing of this wrt to decoding,
  // because output pipe is already stopped if we are changing resolution.
  // We will come back here after we are done with the resolution change.
  DCHECK(!resolution_change_reset_pending_);
  if (decoder_state_ == kChangingResolution) {
    resolution_change_reset_pending_ = true;
    return;
  }

  // After the output stream is stopped, the codec should not post any
  // resolution change events. So we dequeue the resolution change event
  // afterwards. The event could be posted before or while stopping the output
  // stream. The codec will expect the buffer of new size after the seek, so
  // we need to handle the resolution change event first.
  if (!(StopDevicePoll() && StopOutputStream()))
    return;

  if (DequeueResolutionChangeEvent()) {
    resolution_change_reset_pending_ = true;
    StartResolutionChange();
    return;
  }

  if (!StopInputStream())
    return;

  decoder_current_bitstream_buffer_.reset();
  while (!decoder_input_queue_.empty())
    decoder_input_queue_.pop();

  decoder_current_input_buffer_ = -1;

  // If we were flushing, we'll never return any more BitstreamBuffers or
  // PictureBuffers; they have all been dropped and returned by now.
  NotifyFlushDoneIfNeeded();

  // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
  // jobs will early-out in the kResetting state.
  decoder_state_ = kResetting;
  SendPictureReady();  // Send all pending PictureReady.
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
}

void V4L2VideoDecodeAccelerator::ResetDoneTask() {
  DVLOG(3) << "ResetDoneTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");

  if (decoder_state_ == kError) {
    DVLOG(2) << "ResetDoneTask(): early out: kError state";
    return;
  }

  if (!StartDevicePoll())
    return;

  // Reset format-specific bits.
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new media::H264Parser());
  }

  // Jobs drained, we're finished resetting.
  DCHECK_EQ(decoder_state_, kResetting);
  if (output_buffer_map_.empty()) {
    // We must have gotten Reset() before we had a chance to request buffers
    // from the client.
    decoder_state_ = kInitialized;
  } else {
    decoder_state_ = kAfterReset;
  }

  decoder_partial_frame_pending_ = false;
  decoder_delay_bitstream_buffer_id_ = -1;
  child_task_runner_->PostTask(FROM_HERE,
                               base::Bind(&Client::NotifyResetDone, client_));

  // While we were resetting, we early-outed DecodeBufferTask()s.
  ScheduleDecodeBufferTaskIfNeeded();
}

void V4L2VideoDecodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";
  TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");

  // DestroyTask() should run regardless of decoder_state_.

  StopDevicePoll();
  StopOutputStream();
  StopInputStream();

  decoder_current_bitstream_buffer_.reset();
  decoder_current_input_buffer_ = -1;
  decoder_decode_buffer_tasks_scheduled_ = 0;
  decoder_frames_at_client_ = 0;
  while (!decoder_input_queue_.empty())
    decoder_input_queue_.pop();
  decoder_flushing_ = false;

  // Set our state to kError. Just in case.
  decoder_state_ = kError;
}

bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK(!device_poll_thread_.IsRunning());
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
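  // The first DevicePollTask() is posted with poll_device == false;
  // ServiceDeviceTask() re-posts it with poll_device == true once buffers are
  // actually queued on the device.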
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::DevicePollTask,
      base::Unretained(this),
      0));

  return true;
}

bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  if (!device_poll_thread_.IsRunning())
    return true;

  if (decoder_thread_.IsRunning())
    DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt()) {
    PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}

bool V4L2VideoDecodeAccelerator::StopOutputStream() {
  DVLOG(3) << "StopOutputStream()";
  if (!output_streamon_)
    return true;

  __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  output_streamon_ = false;

  // Reset accounting info for output.
  while (!free_output_buffers_.empty())
    free_output_buffers_.pop();

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    DCHECK(!(output_record.at_client && output_record.at_device));

    // After streamoff, the device drops ownership of all buffers, even if
    // we don't dequeue them explicitly.
    output_buffer_map_[i].at_device = false;
    // Some of them may still be owned by the client however.
    // Reuse only those that aren't.
    if (!output_record.at_client) {
      DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
      free_output_buffers_.push(i);
    }
  }
  output_buffer_queued_count_ = 0;
  return true;
}

bool V4L2VideoDecodeAccelerator::StopInputStream() {
  DVLOG(3) << "StopInputStream()";
  if (!input_streamon_)
    return true;

  __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  input_streamon_ = false;

  // Reset accounting info for input.
  while (!input_ready_queue_.empty())
    input_ready_queue_.pop();
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    free_input_buffers_.push_back(i);
    input_buffer_map_[i].at_device = false;
    input_buffer_map_[i].bytes_used = 0;
    input_buffer_map_[i].input_id = -1;
  }
  input_buffer_queued_count_ = 0;

  return true;
}

void V4L2VideoDecodeAccelerator::StartResolutionChange() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);

  DVLOG(3) << "Initiate resolution change";

  if (!(StopDevicePoll() && StopOutputStream()))
    return;

  decoder_state_ = kChangingResolution;

  // Post a task to clean up buffers on child thread. This will also ensure
  // that we won't accept ReusePictureBuffer() anymore after that.
  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
                 weak_this_));
}

void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_EQ(decoder_state_, kChangingResolution);
  DVLOG(3) << "FinishResolutionChange()";

  if (decoder_state_ == kError) {
    DVLOG(2) << "FinishResolutionChange(): early out: kError state";
    return;
  }

  struct v4l2_format format;
  bool again;
  gfx::Size visible_size;
  bool ret = GetFormatInfo(&format, &visible_size, &again);
  if (!ret || again) {
    LOG(ERROR) << "Couldn't get format information after resolution change";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  if (!CreateBuffersForFormat(format, visible_size)) {
    LOG(ERROR) << "Couldn't reallocate buffers after resolution change";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  decoder_state_ = kDecoding;

  if (resolution_change_reset_pending_) {
    resolution_change_reset_pending_ = false;
    ResetTask();
    return;
  }

  if (!StartDevicePoll())
    return;

  Enqueue();
  ScheduleDecodeBufferTaskIfNeeded();
}
1611 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1612 DVLOG(3) << "DevicePollTask()";
1613 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1614 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1616 bool event_pending = false;
1618 if (!device_->Poll(poll_device, &event_pending)) {
1619 NOTIFY_ERROR(PLATFORM_FAILURE);
1620 return;
1623 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1624 // touch decoder state from this thread.
1625 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1626 &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1627 base::Unretained(this), event_pending));
1628 }
1630 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1631 DVLOG(2) << "NotifyError()";
1633 if (!child_task_runner_->BelongsToCurrentThread()) {
1634 child_task_runner_->PostTask(
1635 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
1636 weak_this_, error));
1637 return;
1638 }
1640 if (client_) {
1641 client_->NotifyError(error);
1642 client_ptr_factory_.reset();
1643 }
1644 }
1646 void V4L2VideoDecodeAccelerator::SetErrorState(Error error) {
1647 // We can touch decoder_state_ only if this is the decoder thread or the
1648 // decoder thread isn't running.
1649 if (decoder_thread_.message_loop() != NULL &&
1650 decoder_thread_.message_loop() != base::MessageLoop::current()) {
1651 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1652 &V4L2VideoDecodeAccelerator::SetErrorState,
1653 base::Unretained(this), error));
1654 return;
1655 }
1657 // Post NotifyError only if we are already initialized, as the API does
1658 // not allow doing so before that.
1659 if (decoder_state_ != kError && decoder_state_ != kUninitialized)
1660 NotifyError(error);
1662 decoder_state_ = kError;
1663 }
1665 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1666 gfx::Size* visible_size,
1667 bool* again) {
1668 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1670 *again = false;
1671 memset(format, 0, sizeof(*format));
1672 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1673 if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1674 if (errno == EINVAL) {
1675 // EINVAL means we haven't yet seen enough of the stream to determine the format.
1676 *again = true;
1677 return true;
1678 } else {
1679 PLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
1680 NOTIFY_ERROR(PLATFORM_FAILURE);
1681 return false;
1682 }
1683 }
1685 // Make sure we are still getting the format we set on initialization.
1686 if (format->fmt.pix_mp.pixelformat != output_format_fourcc_) {
1687 LOG(ERROR) << "Unexpected format from G_FMT on output";
1688 return false;
1689 }
1691 gfx::Size coded_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
1692 if (visible_size != nullptr)
1693 *visible_size = GetVisibleSize(coded_size);
1695 return true;
1696 }
1698 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1699 const struct v4l2_format& format,
1700 const gfx::Size& visible_size) {
1701 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1702 output_planes_count_ = format.fmt.pix_mp.num_planes;
1703 coded_size_.SetSize(format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1704 visible_size_ = visible_size;
1705 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
1706 << coded_size_.ToString() << ", visible size: "
1707 << visible_size_.ToString();
1709 return CreateOutputBuffers();
1710 }
1712 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1713 const gfx::Size& coded_size) {
1714 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1716 struct v4l2_crop crop_arg;
1717 memset(&crop_arg, 0, sizeof(crop_arg));
1718 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
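// The crop rectangle reported for the CAPTURE queue describes the visible
// region within the coded frame. Every failure path below conservatively
// falls back to the full coded size.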
1720 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
1721 PLOG(ERROR) << "GetVisibleSize(): ioctl() VIDIOC_G_CROP failed";
1722 return coded_size;
1723 }
1725 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width,
1726 crop_arg.c.height);
1727 DVLOG(3) << "visible rectangle is " << rect.ToString();
1728 if (!gfx::Rect(coded_size).Contains(rect)) {
1729 DLOG(ERROR) << "visible rectangle " << rect.ToString()
1730 << " is not inside coded size " << coded_size.ToString();
1731 return coded_size;
1732 }
1733 if (rect.IsEmpty()) {
1734 DLOG(ERROR) << "visible size is empty";
1735 return coded_size;
1736 }
1738 // Chrome assumes the picture frame is coded at (0, 0).
1739 if (!rect.origin().IsOrigin()) {
1740 DLOG(ERROR) << "Unexpected visible rectangle " << rect.ToString()
1741 << ", top-left is not origin";
1742 return coded_size;
1743 }
1745 return rect.size();
1746 }
1748 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1749 DVLOG(3) << "CreateInputBuffers()";
1750 // We always run this as we prepare to initialize.
1751 DCHECK_EQ(decoder_state_, kUninitialized);
1752 DCHECK(!input_streamon_);
1753 DCHECK(input_buffer_map_.empty());
1755 struct v4l2_requestbuffers reqbufs;
1756 memset(&reqbufs, 0, sizeof(reqbufs));
1757 reqbufs.count = kInputBufferCount;
1758 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1759 reqbufs.memory = V4L2_MEMORY_MMAP;
1760 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1761 input_buffer_map_.resize(reqbufs.count);
1762 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1763 free_input_buffers_.push_back(i);
1765 // Query for the MEMORY_MMAP pointer.
1766 struct v4l2_plane planes[1];
1767 struct v4l2_buffer buffer;
1768 memset(&buffer, 0, sizeof(buffer));
1769 memset(planes, 0, sizeof(planes));
1770 buffer.index = i;
1771 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1772 buffer.memory = V4L2_MEMORY_MMAP;
1773 buffer.m.planes = planes;
1774 buffer.length = 1;
1775 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1776 void* address = device_->Mmap(NULL,
1777 buffer.m.planes[0].length,
1778 PROT_READ | PROT_WRITE,
1779 MAP_SHARED,
1780 buffer.m.planes[0].m.mem_offset);
1781 if (address == MAP_FAILED) {
1782 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
1783 return false;
1784 }
1785 input_buffer_map_[i].address = address;
1786 input_buffer_map_[i].length = buffer.m.planes[0].length;
1787 }
1789 return true;
1790 }
1792 bool V4L2VideoDecodeAccelerator::SetupFormats() {
1793 // We always run this as we prepare to initialize.
1794 DCHECK_EQ(decoder_state_, kUninitialized);
1795 DCHECK(!input_streamon_);
1796 DCHECK(!output_streamon_);
1798 __u32 input_format_fourcc =
1799 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
1800 if (!input_format_fourcc) {
1801 NOTREACHED();
1802 return false;
1803 }
1805 size_t input_size;
1806 gfx::Size max_resolution, min_resolution;
1807 device_->GetSupportedResolution(input_format_fourcc, &min_resolution,
1808 &max_resolution);
1809 if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
1810 input_size = kInputBufferMaxSizeFor4k;
1811 else
1812 input_size = kInputBufferMaxSizeFor1080p;
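// The 1920x1088 threshold presumably accounts for macroblock alignment: a
// 1920x1080 stream is typically coded with 1088 rows, so only devices that
// advertise resolutions beyond that use the larger 4K input buffer size.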
1814 struct v4l2_fmtdesc fmtdesc;
1815 memset(&fmtdesc, 0, sizeof(fmtdesc));
1816 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1817 bool is_format_supported = false;
1818 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1819 if (fmtdesc.pixelformat == input_format_fourcc) {
1820 is_format_supported = true;
1821 break;
1822 }
1823 ++fmtdesc.index;
1824 }
1826 if (!is_format_supported) {
1827 DVLOG(1) << "Input fourcc " << input_format_fourcc
1828 << " not supported by device.";
1829 return false;
1830 }
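// For the compressed input (OUTPUT) queue, only the fourcc and the plane's
// sizeimage are specified here; width/height are left at zero, since the
// coded resolution is extracted from the stream by the driver.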
1832 struct v4l2_format format;
1833 memset(&format, 0, sizeof(format));
1834 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1835 format.fmt.pix_mp.pixelformat = input_format_fourcc;
1836 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
1837 format.fmt.pix_mp.num_planes = 1;
1838 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1840 // We have to set up the format for output, because the driver may not allow
1841 // changing it once we start streaming; whether it can support our chosen
1842 // output format or not may depend on the input format.
1843 memset(&fmtdesc, 0, sizeof(fmtdesc));
1844 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1845 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1846 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
1847 output_format_fourcc_ = fmtdesc.pixelformat;
1848 break;
1849 }
1850 ++fmtdesc.index;
1851 }
1853 if (output_format_fourcc_ == 0) {
1854 LOG(ERROR) << "Could not find a usable output format";
1855 return false;
1856 }
1858 // Just set the fourcc for output; resolution, etc., will come from the
1859 // driver once it extracts it from the stream.
1860 memset(&format, 0, sizeof(format));
1861 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1862 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
1863 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1865 return true;
1866 }
1868 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1869 DVLOG(3) << "CreateOutputBuffers()";
1870 DCHECK(decoder_state_ == kInitialized ||
1871 decoder_state_ == kChangingResolution);
1872 DCHECK(!output_streamon_);
1873 DCHECK(output_buffer_map_.empty());
1875 // Number of output buffers we need.
1876 struct v4l2_control ctrl;
1877 memset(&ctrl, 0, sizeof(ctrl));
1878 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1879 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1880 output_dpb_size_ = ctrl.value;
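// V4L2_CID_MIN_BUFFERS_FOR_CAPTURE reports the minimum number of CAPTURE
// buffers the decoder needs for this stream (its DPB size);
// kDpbOutputBufferExtraCount below adds headroom on top of that minimum.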
1882 // Output format setup in Initialize().
1884 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
1885 DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
1886 << "buffer_count=" << buffer_count
1887 << ", coded_size=" << coded_size_.ToString();
1888 child_task_runner_->PostTask(
1889 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
1890 buffer_count, coded_size_,
1891 device_->GetTextureTarget()));
1893 // Wait for the client to call AssignPictureBuffers() on the Child thread.
1894 // We do this because if we continue decoding without finishing buffer
1895 // allocation, we may end up Resetting before AssignPictureBuffers arrives,
1896 // resulting in unnecessary complications and subtle bugs.
1897 // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
1898 // in a sequence, and Decode(Input1) results in us getting here and exiting
1899 // without waiting, we might end up running Reset{,Done}Task() before
1900 // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
1901 // to the free_output_buffers_ queue twice. If we somehow marked buffers as
1902 // not ready, we'd need special handling for restarting the second Decode
1903 // task and delaying it anyway.
1904 // Waiting here is not very costly and makes reasoning about different
1905 // situations much simpler.
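// (pictures_assigned_ is presumably a base::WaitableEvent signaled once
// AssignPictureBuffers() has run on the child thread.)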
1906 pictures_assigned_.Wait();
1908 Enqueue();
1909 return true;
1910 }
1912 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1913 DVLOG(3) << "DestroyInputBuffers()";
1914 DCHECK(child_task_runner_->BelongsToCurrentThread());
1915 DCHECK(!input_streamon_);
1917 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1918 if (input_buffer_map_[i].address != NULL) {
1919 device_->Munmap(input_buffer_map_[i].address,
1920 input_buffer_map_[i].length);
1921 }
1922 }
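// Requesting zero buffers with VIDIOC_REQBUFS releases the MMAP buffers that
// were allocated by the driver in CreateInputBuffers().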
1924 struct v4l2_requestbuffers reqbufs;
1925 memset(&reqbufs, 0, sizeof(reqbufs));
1926 reqbufs.count = 0;
1927 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1928 reqbufs.memory = V4L2_MEMORY_MMAP;
1929 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1931 input_buffer_map_.clear();
1932 free_input_buffers_.clear();
1933 }
1935 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1936 DVLOG(3) << "DestroyOutputBuffers()";
1937 DCHECK(child_task_runner_->BelongsToCurrentThread());
1938 DCHECK(!output_streamon_);
1939 bool success = true;
1941 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1942 OutputRecord& output_record = output_buffer_map_[i];
1944 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1945 if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
1946 EGL_TRUE) {
1947 DVLOG(1) << __func__ << " DestroyEGLImage failed.";
1948 success = false;
1949 }
1950 }
1952 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1953 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1954 DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
1955 success = false;
1956 }
1957 }
1959 DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
1960 << output_record.picture_id;
1961 child_task_runner_->PostTask(
1962 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
1963 output_record.picture_id));
1964 }
1966 struct v4l2_requestbuffers reqbufs;
1967 memset(&reqbufs, 0, sizeof(reqbufs));
1968 reqbufs.count = 0;
1969 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1970 reqbufs.memory = V4L2_MEMORY_MMAP;
1971 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
1972 PLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
1973 success = false;
1974 }
1976 output_buffer_map_.clear();
1977 while (!free_output_buffers_.empty())
1978 free_output_buffers_.pop();
1980 return success;
1981 }
1983 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
1984 DCHECK(child_task_runner_->BelongsToCurrentThread());
1985 DVLOG(3) << "ResolutionChangeDestroyBuffers()";
1987 if (!DestroyOutputBuffers()) {
1988 LOG(ERROR) << __func__ << " Failed destroying output buffers.";
1989 NOTIFY_ERROR(PLATFORM_FAILURE);
1990 return;
1991 }
1993 // Finish resolution change on decoder thread.
1994 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1995 &V4L2VideoDecodeAccelerator::FinishResolutionChange,
1996 base::Unretained(this)));
1997 }
1999 void V4L2VideoDecodeAccelerator::SendPictureReady() {
2000 DVLOG(3) << "SendPictureReady()";
2001 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2002 bool resetting_or_flushing =
2003 (decoder_state_ == kResetting || decoder_flushing_);
2004 while (pending_picture_ready_.size() > 0) {
2005 bool cleared = pending_picture_ready_.front().cleared;
2006 const media::Picture& picture = pending_picture_ready_.front().picture;
2007 if (cleared && picture_clearing_count_ == 0) {
2008 // This picture is already cleared. Post it to the IO thread to reduce
2009 // latency. This is the common case once the initial pictures have been cleared.
2010 io_task_runner_->PostTask(
2011 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
2012 pending_picture_ready_.pop();
2013 } else if (!cleared || resetting_or_flushing) {
2014 DVLOG(3) << "SendPictureReady()"
2015 << ". cleared=" << pending_picture_ready_.front().cleared
2016 << ", decoder_state_=" << decoder_state_
2017 << ", decoder_flushing_=" << decoder_flushing_
2018 << ", picture_clearing_count_=" << picture_clearing_count_;
2019 // If the picture is not cleared, post it to the child thread because it
2020 // has to be cleared in the child thread. A picture only needs to be
2021 // cleared once. If the decoder is resetting or flushing, send all
2022 // pictures to ensure PictureReady arrives before the reset or flush is done.
2023 child_task_runner_->PostTaskAndReply(
2024 FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
2025 // Unretained is safe. If Client::PictureReady gets to run, |this| is
2026 // alive. Destroy() will wait for the decoder thread to finish.
2027 base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
2028 base::Unretained(this)));
2029 picture_clearing_count_++;
2030 pending_picture_ready_.pop();
2031 } else {
2032 // This picture is cleared. But some pictures are about to be cleared on
2033 // the child thread. To preserve the order, do not send this until those
2034 // pictures are cleared.
2035 break;
2036 }
2037 }
2038 }
2040 void V4L2VideoDecodeAccelerator::PictureCleared() {
2041 DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
2042 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2043 DCHECK_GT(picture_clearing_count_, 0);
2044 picture_clearing_count_--;
2045 SendPictureReady();
2046 }
2048 } // namespace content