content/common/gpu/media/v4l2_video_decode_accelerator.cc
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
14 #include "base/bind.h"
15 #include "base/command_line.h"
16 #include "base/memory/shared_memory.h"
17 #include "base/message_loop/message_loop.h"
18 #include "base/numerics/safe_conversions.h"
19 #include "base/thread_task_runner_handle.h"
20 #include "base/trace_event/trace_event.h"
21 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
22 #include "media/base/media_switches.h"
23 #include "media/filters/h264_parser.h"
24 #include "ui/gfx/geometry/rect.h"
25 #include "ui/gl/scoped_binders.h"
27 #define NOTIFY_ERROR(x) \
28 do { \
29 LOG(ERROR) << "Setting error state:" << x; \
30 SetErrorState(x); \
31 } while (0)
33 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
34 do { \
35 if (device_->Ioctl(type, arg) != 0) { \
36 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
37 NOTIFY_ERROR(PLATFORM_FAILURE); \
38 return value; \
39 } \
40 } while (0)
42 #define IOCTL_OR_ERROR_RETURN(type, arg) \
43 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)
45 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)
48 #define IOCTL_OR_LOG_ERROR(type, arg) \
49 do { \
50 if (device_->Ioctl(type, arg) != 0) \
51 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52 } while (0)
54 namespace content {
56 namespace {
58 // TODO(posciak): remove once we update linux-headers.
59 #ifndef V4L2_EVENT_RESOLUTION_CHANGE
60 #define V4L2_EVENT_RESOLUTION_CHANGE 5
61 #endif
63 } // anonymous namespace
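// BitstreamBufferRef keeps a client bitstream buffer (shared memory) alive
// while it is being decoded; its destructor posts NotifyEndOfBitstreamBuffer()
// back to the client for buffers with a non-negative input_id.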
65 struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
66 BitstreamBufferRef(
67 base::WeakPtr<Client>& client,
68 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
69 base::SharedMemory* shm,
70 size_t size,
71 int32 input_id);
72 ~BitstreamBufferRef();
73 const base::WeakPtr<Client> client;
74 const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
75 const scoped_ptr<base::SharedMemory> shm;
76 const size_t size;
77 size_t bytes_used;
78 const int32 input_id;
81 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
82 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
83 ~EGLSyncKHRRef();
84 EGLDisplay const egl_display;
85 EGLSyncKHR egl_sync;
88 struct V4L2VideoDecodeAccelerator::PictureRecord {
89 PictureRecord(bool cleared, const media::Picture& picture);
90 ~PictureRecord();
91 bool cleared; // Whether the texture is cleared and safe to render from.
92 media::Picture picture; // The decoded picture.
95 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
96 base::WeakPtr<Client>& client,
97 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
98 base::SharedMemory* shm,
99 size_t size,
100 int32 input_id)
101 : client(client),
102 client_task_runner(client_task_runner),
103 shm(shm),
104 size(size),
105 bytes_used(0),
106 input_id(input_id) {
109 V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
110 if (input_id >= 0) {
111 client_task_runner->PostTask(
112 FROM_HERE,
113 base::Bind(&Client::NotifyEndOfBitstreamBuffer, client, input_id));
117 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
118 EGLDisplay egl_display, EGLSyncKHR egl_sync)
119 : egl_display(egl_display),
120 egl_sync(egl_sync) {
123 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
124 // We don't check for eglDestroySyncKHR failures, because if we get here
125 // with a valid sync object, something went wrong and we are getting
126 // destroyed anyway.
127 if (egl_sync != EGL_NO_SYNC_KHR)
128 eglDestroySyncKHR(egl_display, egl_sync);
131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
132 : at_device(false),
133 address(NULL),
134 length(0),
135 bytes_used(0),
136 input_id(-1) {
139 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
142 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
143 : at_device(false),
144 at_client(false),
145 egl_image(EGL_NO_IMAGE_KHR),
146 egl_sync(EGL_NO_SYNC_KHR),
147 picture_id(-1),
148 cleared(false) {
151 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
153 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
154 bool cleared,
155 const media::Picture& picture)
156 : cleared(cleared), picture(picture) {}
158 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
160 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
161 EGLDisplay egl_display,
162 EGLContext egl_context,
163 const base::WeakPtr<Client>& io_client,
164 const base::Callback<bool(void)>& make_context_current,
165 const scoped_refptr<V4L2Device>& device,
166 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
167 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
168 io_task_runner_(io_task_runner),
169 io_client_(io_client),
170 decoder_thread_("V4L2DecoderThread"),
171 decoder_state_(kUninitialized),
172 device_(device),
173 decoder_delay_bitstream_buffer_id_(-1),
174 decoder_current_input_buffer_(-1),
175 decoder_decode_buffer_tasks_scheduled_(0),
176 decoder_frames_at_client_(0),
177 decoder_flushing_(false),
178 resolution_change_reset_pending_(false),
179 decoder_partial_frame_pending_(false),
180 input_streamon_(false),
181 input_buffer_queued_count_(0),
182 output_streamon_(false),
183 output_buffer_queued_count_(0),
184 output_dpb_size_(0),
185 output_planes_count_(0),
186 picture_clearing_count_(0),
187 pictures_assigned_(false, false),
188 device_poll_thread_("V4L2DevicePollThread"),
189 make_context_current_(make_context_current),
190 egl_display_(egl_display),
191 egl_context_(egl_context),
192 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
193 output_format_fourcc_(0),
194 weak_this_factory_(this) {
195 weak_this_ = weak_this_factory_.GetWeakPtr();
198 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
199 DCHECK(!decoder_thread_.IsRunning());
200 DCHECK(!device_poll_thread_.IsRunning());
202 DestroyInputBuffers();
203 DestroyOutputBuffers();
205 // These maps have members that should be manually destroyed, e.g. file
206 // descriptors, mmap() segments, etc.
207 DCHECK(input_buffer_map_.empty());
208 DCHECK(output_buffer_map_.empty());
211 bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
212 Client* client) {
213 DVLOG(3) << "Initialize()";
214 DCHECK(child_task_runner_->BelongsToCurrentThread());
215 DCHECK_EQ(decoder_state_, kUninitialized);
217 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
218 client_ = client_ptr_factory_->GetWeakPtr();
220 switch (profile) {
221 case media::H264PROFILE_BASELINE:
222 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
223 break;
224 case media::H264PROFILE_MAIN:
225 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
226 break;
227 case media::H264PROFILE_HIGH:
228 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
229 break;
230 case media::VP8PROFILE_ANY:
231 DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
232 break;
233 case media::VP9PROFILE_ANY:
234 DVLOG(2) << "Initialize(): profile VP9PROFILE_ANY";
235 break;
236 default:
237 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
238 return false;
240 video_profile_ = profile;
242 if (egl_display_ == EGL_NO_DISPLAY) {
243 LOG(ERROR) << "Initialize(): could not get EGLDisplay";
244 return false;
247 // We need the context to be initialized to query extensions.
248 if (!make_context_current_.Run()) {
249 LOG(ERROR) << "Initialize(): could not make context current";
250 return false;
253 // TODO(posciak): crbug.com/450898.
254 #if defined(ARCH_CPU_ARMEL)
255 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
256 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
257 return false;
259 #endif
261 // Capabilities check.
262 struct v4l2_capability caps;
263 const __u32 kCapsRequired =
264 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
265 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
266 V4L2_CAP_STREAMING;
267 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
268 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
269 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
270 ", caps check failed: 0x" << std::hex << caps.capabilities;
271 return false;
274 if (!SetupFormats())
275 return false;
277 // Subscribe to the resolution change event.
278 struct v4l2_event_subscription sub;
279 memset(&sub, 0, sizeof(sub));
280 sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
281 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);
283 if (video_profile_ >= media::H264PROFILE_MIN &&
284 video_profile_ <= media::H264PROFILE_MAX) {
285 decoder_h264_parser_.reset(new media::H264Parser());
288 if (!CreateInputBuffers())
289 return false;
291 if (!decoder_thread_.Start()) {
292 LOG(ERROR) << "Initialize(): decoder thread failed to start";
293 return false;
296 decoder_state_ = kInitialized;
298 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
299 decoder_thread_.message_loop()->PostTask(
300 FROM_HERE,
301 base::Bind(
302 base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
303 base::Unretained(this)));
305 return true;
308 void V4L2VideoDecodeAccelerator::Decode(
309 const media::BitstreamBuffer& bitstream_buffer) {
310 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
311 << ", size=" << bitstream_buffer.size();
312 DCHECK(io_task_runner_->BelongsToCurrentThread());
314 // DecodeTask() will take care of running a DecodeBufferTask().
315 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
316 &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
317 bitstream_buffer));
320 void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
321 const std::vector<media::PictureBuffer>& buffers) {
322 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
323 DCHECK(child_task_runner_->BelongsToCurrentThread());
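  // The client must supply at least the DPB size plus kDpbOutputBufferExtraCount
  // extra picture buffers; fewer than that is treated as an error below.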
325 const uint32_t req_buffer_count =
326 output_dpb_size_ + kDpbOutputBufferExtraCount;
328 if (buffers.size() < req_buffer_count) {
329 LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
330 " buffers. (Got " << buffers.size()
331 << ", requested " << req_buffer_count << ")";
332 NOTIFY_ERROR(INVALID_ARGUMENT);
333 return;
336 if (!make_context_current_.Run()) {
337 LOG(ERROR) << "AssignPictureBuffers(): could not make context current";
338 NOTIFY_ERROR(PLATFORM_FAILURE);
339 return;
342 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
344 // It's safe to manipulate all the buffer state here, because the decoder
345 // thread is waiting on pictures_assigned_.
347 // Allocate the output buffers.
348 struct v4l2_requestbuffers reqbufs;
349 memset(&reqbufs, 0, sizeof(reqbufs));
350 reqbufs.count = buffers.size();
351 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
352 reqbufs.memory = V4L2_MEMORY_MMAP;
353 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
355 if (reqbufs.count != buffers.size()) {
356 DLOG(ERROR) << "Could not allocate enough output buffers";
357 NOTIFY_ERROR(PLATFORM_FAILURE);
358 return;
361 output_buffer_map_.resize(buffers.size());
363 DCHECK(free_output_buffers_.empty());
364 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
365 DCHECK(buffers[i].size() == coded_size_);
367 OutputRecord& output_record = output_buffer_map_[i];
368 DCHECK(!output_record.at_device);
369 DCHECK(!output_record.at_client);
370 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
371 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
372 DCHECK_EQ(output_record.picture_id, -1);
373 DCHECK_EQ(output_record.cleared, false);
375 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
376 egl_context_,
377 buffers[i].texture_id(),
378 coded_size_,
380 output_format_fourcc_,
381 output_planes_count_);
382 if (egl_image == EGL_NO_IMAGE_KHR) {
383 LOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
384 // Ownership of EGLImages allocated in previous iterations of this loop
385 // has been transferred to output_buffer_map_. After we error-out here
386 // the destructor will handle their cleanup.
387 NOTIFY_ERROR(PLATFORM_FAILURE);
388 return;
391 output_record.egl_image = egl_image;
392 output_record.picture_id = buffers[i].id();
393 free_output_buffers_.push(i);
394 DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
395 << "]: picture_id=" << output_record.picture_id;
398 pictures_assigned_.Signal();
401 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
402 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
403 // Must be run on child thread, as we'll insert a sync in the EGL context.
404 DCHECK(child_task_runner_->BelongsToCurrentThread());
406 if (!make_context_current_.Run()) {
407 LOG(ERROR) << "ReusePictureBuffer(): could not make context current";
408 NOTIFY_ERROR(PLATFORM_FAILURE);
409 return;
412 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
413 // TODO(posciak): crbug.com/450898.
414 #if defined(ARCH_CPU_ARMEL)
415 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
416 if (egl_sync == EGL_NO_SYNC_KHR) {
417 LOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
418 NOTIFY_ERROR(PLATFORM_FAILURE);
419 return;
421 #endif
423 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
424 egl_display_, egl_sync));
425 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
426 &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
427 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
430 void V4L2VideoDecodeAccelerator::Flush() {
431 DVLOG(3) << "Flush()";
432 DCHECK(child_task_runner_->BelongsToCurrentThread());
433 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
434 &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
437 void V4L2VideoDecodeAccelerator::Reset() {
438 DVLOG(3) << "Reset()";
439 DCHECK(child_task_runner_->BelongsToCurrentThread());
440 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
441 &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
444 void V4L2VideoDecodeAccelerator::Destroy() {
445 DVLOG(3) << "Destroy()";
446 DCHECK(child_task_runner_->BelongsToCurrentThread());
448 // We're destroying; cancel all callbacks.
449 client_ptr_factory_.reset();
450 weak_this_factory_.InvalidateWeakPtrs();
452 // If the decoder thread is running, destroy using posted task.
453 if (decoder_thread_.IsRunning()) {
454 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
455 &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
456 pictures_assigned_.Signal();
457 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
458 decoder_thread_.Stop();
459 } else {
460 // Otherwise, call the destroy task directly.
461 DestroyTask();
464 delete this;
467 bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
469 // static
470 media::VideoDecodeAccelerator::SupportedProfiles
471 V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
472 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
473 if (!device)
474 return SupportedProfiles();
476 const uint32_t supported_formats[] = {
477 V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9};
478 return device->GetSupportedDecodeProfiles(arraysize(supported_formats),
479 supported_formats);
482 void V4L2VideoDecodeAccelerator::DecodeTask(
483 const media::BitstreamBuffer& bitstream_buffer) {
484 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
485 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
486 DCHECK_NE(decoder_state_, kUninitialized);
487 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
488 bitstream_buffer.id());
490 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
491 io_client_, io_task_runner_,
492 new base::SharedMemory(bitstream_buffer.handle(), true),
493 bitstream_buffer.size(), bitstream_buffer.id()));
494 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
495 LOG(ERROR) << "Decode(): could not map bitstream_buffer";
496 NOTIFY_ERROR(UNREADABLE_INPUT);
497 return;
499 DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();
501 if (decoder_state_ == kResetting || decoder_flushing_) {
502 // In the case that we're resetting or flushing, we need to delay decoding
503 // the BitstreamBuffers that come after the Reset() or Flush() call. When
504 // we're here, we know that this DecodeTask() was scheduled by a Decode()
505 // call that came after (in the client thread) the Reset() or Flush() call;
506 // thus set up the delay if necessary.
507 if (decoder_delay_bitstream_buffer_id_ == -1)
508 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
509 } else if (decoder_state_ == kError) {
510 DVLOG(2) << "DecodeTask(): early out: kError state";
511 return;
514 decoder_input_queue_.push(
515 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
516 decoder_decode_buffer_tasks_scheduled_++;
517 DecodeBufferTask();
520 void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
521 DVLOG(3) << "DecodeBufferTask()";
522 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
523 DCHECK_NE(decoder_state_, kUninitialized);
524 TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
526 decoder_decode_buffer_tasks_scheduled_--;
528 if (decoder_state_ == kResetting) {
529 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
530 return;
531 } else if (decoder_state_ == kError) {
532 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
533 return;
534 } else if (decoder_state_ == kChangingResolution) {
535 DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
536 return;
539 if (decoder_current_bitstream_buffer_ == NULL) {
540 if (decoder_input_queue_.empty()) {
541 // We're waiting for a new buffer -- exit without scheduling a new task.
542 return;
544 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
545 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
546 // We're asked to delay decoding on this and subsequent buffers.
547 return;
550 // Setup to use the next buffer.
551 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
552 decoder_input_queue_.pop();
553 DVLOG(3) << "DecodeBufferTask(): reading input_id="
554 << decoder_current_bitstream_buffer_->input_id
555 << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
556 decoder_current_bitstream_buffer_->shm->memory() :
557 NULL)
558 << ", size=" << decoder_current_bitstream_buffer_->size;
560 bool schedule_task = false;
561 const size_t size = decoder_current_bitstream_buffer_->size;
562 size_t decoded_size = 0;
563 if (size == 0) {
564 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
565 if (input_id >= 0) {
566 // This is a buffer queued from the client that has zero size. Skip.
567 schedule_task = true;
568 } else {
569 // This is a buffer of zero size, queued to flush the pipe. Flush.
570 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
571 static_cast<base::SharedMemory*>(NULL));
572 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
573 // current input, enqueue no data to the next frame, then flush that down.
574 schedule_task = true;
575 if (decoder_current_input_buffer_ != -1 &&
576 input_buffer_map_[decoder_current_input_buffer_].input_id !=
577 kFlushBufferId)
578 schedule_task = FlushInputFrame();
580 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
581 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
582 decoder_partial_frame_pending_ = false;
583 schedule_task = true;
584 } else {
585 // If we failed to enqueue the empty buffer (due to pipeline
586 // backpressure), don't advance the bitstream buffer queue, and don't
587 // schedule the next task. This bitstream buffer queue entry will get
588 // reprocessed when the pipeline frees up.
589 schedule_task = false;
592 } else {
593 // This is a buffer queued from the client, with actual contents. Decode.
594 const uint8* const data =
595 reinterpret_cast<const uint8*>(
596 decoder_current_bitstream_buffer_->shm->memory()) +
597 decoder_current_bitstream_buffer_->bytes_used;
598 const size_t data_size =
599 decoder_current_bitstream_buffer_->size -
600 decoder_current_bitstream_buffer_->bytes_used;
601 if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
602 NOTIFY_ERROR(UNREADABLE_INPUT);
603 return;
605 // AdvanceFrameFragment should not return a size larger than the buffer
606 // size, even on invalid data.
607 CHECK_LE(decoded_size, data_size);
609 switch (decoder_state_) {
610 case kInitialized:
611 case kAfterReset:
612 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
613 break;
614 case kDecoding:
615 schedule_task = DecodeBufferContinue(data, decoded_size);
616 break;
617 default:
618 NOTIFY_ERROR(ILLEGAL_STATE);
619 return;
622 if (decoder_state_ == kError) {
623 // Failed during decode.
624 return;
627 if (schedule_task) {
628 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
629 if (decoder_current_bitstream_buffer_->bytes_used ==
630 decoder_current_bitstream_buffer_->size) {
631 // Our current bitstream buffer is done; return it.
632 int32 input_id = decoder_current_bitstream_buffer_->input_id;
633 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
634 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
635 decoder_current_bitstream_buffer_.reset();
637 ScheduleDecodeBufferTaskIfNeeded();
641 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
642 const uint8* data,
643 size_t size,
644 size_t* endpos) {
645 if (video_profile_ >= media::H264PROFILE_MIN &&
646 video_profile_ <= media::H264PROFILE_MAX) {
647 // For H264, we need to feed HW one frame at a time. This is going to take
648 // some parsing of our input stream.
649 decoder_h264_parser_->SetStream(data, size);
650 media::H264NALU nalu;
651 media::H264Parser::Result result;
652 *endpos = 0;
654 // Keep on peeking the next NALs while they don't indicate a frame
655 // boundary.
656 for (;;) {
657 bool end_of_frame = false;
658 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
659 if (result == media::H264Parser::kInvalidStream ||
660 result == media::H264Parser::kUnsupportedStream)
661 return false;
662 if (result == media::H264Parser::kEOStream) {
663 // We've reached the end of the buffer before finding a frame boundary.
664 decoder_partial_frame_pending_ = true;
665 return true;
667 switch (nalu.nal_unit_type) {
668 case media::H264NALU::kNonIDRSlice:
669 case media::H264NALU::kIDRSlice:
670 if (nalu.size < 1)
671 return false;
672 // For these two, if the "first_mb_in_slice" field is zero, start a
673 // new frame and return. This field is Exp-Golomb coded starting on
674 // the eighth data bit of the NAL; a zero value is encoded with a
675 // leading '1' bit in the byte, which we can detect as the byte being
676 // (unsigned) greater than or equal to 0x80.
677 if (nalu.data[1] >= 0x80) {
678 end_of_frame = true;
679 break;
681 break;
682 case media::H264NALU::kSEIMessage:
683 case media::H264NALU::kSPS:
684 case media::H264NALU::kPPS:
685 case media::H264NALU::kAUD:
686 case media::H264NALU::kEOSeq:
687 case media::H264NALU::kEOStream:
688 case media::H264NALU::kReserved14:
689 case media::H264NALU::kReserved15:
690 case media::H264NALU::kReserved16:
691 case media::H264NALU::kReserved17:
692 case media::H264NALU::kReserved18:
693 // These unconditionally signal a frame boundary.
694 end_of_frame = true;
695 break;
696 default:
697 // For all others, keep going.
698 break;
700 if (end_of_frame) {
701 if (!decoder_partial_frame_pending_ && *endpos == 0) {
702 // The frame was previously restarted, and we haven't filled the
703 // current frame with any contents yet. Start the new frame here and
704 // continue parsing NALs.
705 } else {
706 // The frame wasn't previously restarted and/or we have contents for
707 // the current frame; signal the start of a new frame here: we don't
708 // have a partial frame anymore.
709 decoder_partial_frame_pending_ = false;
710 return true;
713 *endpos = (nalu.data + nalu.size) - data;
715 NOTREACHED();
716 return false;
717 } else {
718 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
719 DCHECK_LE(video_profile_, media::VP9PROFILE_MAX);
720 // For VP8/9, we can just dump the entire buffer. No fragmentation needed,
721 // and we never return a partial frame.
722 *endpos = size;
723 decoder_partial_frame_pending_ = false;
724 return true;
728 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
729 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
731 // If we're behind on tasks, schedule another one.
732 int buffers_to_decode = decoder_input_queue_.size();
733 if (decoder_current_bitstream_buffer_ != NULL)
734 buffers_to_decode++;
735 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
736 decoder_decode_buffer_tasks_scheduled_++;
737 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
738 &V4L2VideoDecodeAccelerator::DecodeBufferTask,
739 base::Unretained(this)));
743 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
744 const void* data, size_t size, size_t* endpos) {
745 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
746 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
747 DCHECK_NE(decoder_state_, kUninitialized);
748 DCHECK_NE(decoder_state_, kDecoding);
749 // Initial decode. We haven't been able to get output stream format info yet.
750 // Get it, and start decoding.
752 // Copy in and send to HW.
753 if (!AppendToInputFrame(data, size))
754 return false;
756 // If we only have a partial frame, don't flush and process yet.
757 if (decoder_partial_frame_pending_)
758 return true;
760 if (!FlushInputFrame())
761 return false;
763 // Recycle buffers.
764 Dequeue();
766 // Check and see if we have format info yet.
767 struct v4l2_format format;
768 gfx::Size visible_size;
769 bool again = false;
770 if (!GetFormatInfo(&format, &visible_size, &again))
771 return false;
773 *endpos = size;
775 if (again) {
776 // Need more stream to decode format, return true and schedule next buffer.
777 return true;
780 // Run this initialization only on first startup.
781 if (decoder_state_ == kInitialized) {
782 DVLOG(3) << "DecodeBufferInitial(): running initialization";
783 // Success! Setup our parameters.
784 if (!CreateBuffersForFormat(format, visible_size))
785 return false;
788 decoder_state_ = kDecoding;
789 ScheduleDecodeBufferTaskIfNeeded();
790 return true;
793 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
794 const void* data, size_t size) {
795 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
796 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
797 DCHECK_EQ(decoder_state_, kDecoding);
799 // Both of these calls will set kError state if they fail.
800 // Only flush the frame if it's complete.
801 return (AppendToInputFrame(data, size) &&
802 (decoder_partial_frame_pending_ || FlushInputFrame()));
805 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
806 const void* data, size_t size) {
807 DVLOG(3) << "AppendToInputFrame()";
808 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
809 DCHECK_NE(decoder_state_, kUninitialized);
810 DCHECK_NE(decoder_state_, kResetting);
811 DCHECK_NE(decoder_state_, kError);
812 // This routine can handle data == NULL and size == 0, which occurs when
813 // we queue an empty buffer for the purposes of flushing the pipe.
815 // Flush if we're too big
816 if (decoder_current_input_buffer_ != -1) {
817 InputRecord& input_record =
818 input_buffer_map_[decoder_current_input_buffer_];
819 if (input_record.bytes_used + size > input_record.length) {
820 if (!FlushInputFrame())
821 return false;
822 decoder_current_input_buffer_ = -1;
826 // Try to get an available input buffer
827 if (decoder_current_input_buffer_ == -1) {
828 if (free_input_buffers_.empty()) {
829 // See if we can get more free buffers from HW
830 Dequeue();
831 if (free_input_buffers_.empty()) {
832 // Nope!
833 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
834 return false;
837 decoder_current_input_buffer_ = free_input_buffers_.back();
838 free_input_buffers_.pop_back();
839 InputRecord& input_record =
840 input_buffer_map_[decoder_current_input_buffer_];
841 DCHECK_EQ(input_record.bytes_used, 0);
842 DCHECK_EQ(input_record.input_id, -1);
843 DCHECK(decoder_current_bitstream_buffer_ != NULL);
844 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
847 DCHECK(data != NULL || size == 0);
848 if (size == 0) {
849 // If we asked for an empty buffer, return now. We return only after
850 // getting the next input buffer, since we might actually want an empty
851 // input buffer for flushing purposes.
852 return true;
855 // Copy in to the buffer.
856 InputRecord& input_record =
857 input_buffer_map_[decoder_current_input_buffer_];
858 if (size > input_record.length - input_record.bytes_used) {
859 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
860 NOTIFY_ERROR(UNREADABLE_INPUT);
861 return false;
863 memcpy(
864 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
865 data,
866 size);
867 input_record.bytes_used += size;
869 return true;
872 bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
873 DVLOG(3) << "FlushInputFrame()";
874 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
875 DCHECK_NE(decoder_state_, kUninitialized);
876 DCHECK_NE(decoder_state_, kResetting);
877 DCHECK_NE(decoder_state_, kError);
879 if (decoder_current_input_buffer_ == -1)
880 return true;
882 InputRecord& input_record =
883 input_buffer_map_[decoder_current_input_buffer_];
884 DCHECK_NE(input_record.input_id, -1);
885 DCHECK(input_record.input_id != kFlushBufferId ||
886 input_record.bytes_used == 0);
887 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
888 // got from the client. We can skip it if it is empty.
889 // * if input_id < 0 (should be kFlushBufferId in this case), this input
890 // buffer was prompted by a flush buffer, and should be queued even when
891 // empty.
892 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
893 input_record.input_id = -1;
894 free_input_buffers_.push_back(decoder_current_input_buffer_);
895 decoder_current_input_buffer_ = -1;
896 return true;
899 // Queue it.
900 input_ready_queue_.push(decoder_current_input_buffer_);
901 decoder_current_input_buffer_ = -1;
902 DVLOG(3) << "FlushInputFrame(): submitting input_id="
903 << input_record.input_id;
904 // Enqueue once since there's new available input for it.
905 Enqueue();
907 return (decoder_state_ != kError);
910 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
911 DVLOG(3) << "ServiceDeviceTask()";
912 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
913 DCHECK_NE(decoder_state_, kUninitialized);
914 TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
916 if (decoder_state_ == kResetting) {
917 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
918 return;
919 } else if (decoder_state_ == kError) {
920 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
921 return;
922 } else if (decoder_state_ == kChangingResolution) {
923 DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
924 return;
927 bool resolution_change_pending = false;
928 if (event_pending)
929 resolution_change_pending = DequeueResolutionChangeEvent();
930 Dequeue();
931 Enqueue();
933 // Clear the interrupt fd.
934 if (!device_->ClearDevicePollInterrupt()) {
935 NOTIFY_ERROR(PLATFORM_FAILURE);
936 return;
939 bool poll_device = false;
940 // Add fd, if we should poll on it.
941 // Can be polled as soon as either input or output buffers are queued.
942 if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
943 poll_device = true;
945 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
946 // so either:
947 // * device_poll_thread_ is running normally
948 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
949 // shut it down, in which case we're either in kResetting or kError states
950 // respectively, and we should have early-outed already.
951 DCHECK(device_poll_thread_.message_loop());
952 // Queue the DevicePollTask() now.
953 device_poll_thread_.message_loop()->PostTask(
954 FROM_HERE,
955 base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
956 base::Unretained(this),
957 poll_device));
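  // Log queue occupancy: decoder-side input queues, device input/output buffer
  // usage (free + queued / total), and the number of frames held by the client.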
959 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
960 << decoder_input_queue_.size() << "->"
961 << input_ready_queue_.size() << "] => DEVICE["
962 << free_input_buffers_.size() << "+"
963 << input_buffer_queued_count_ << "/"
964 << input_buffer_map_.size() << "->"
965 << free_output_buffers_.size() << "+"
966 << output_buffer_queued_count_ << "/"
967 << output_buffer_map_.size() << "] => VDA["
968 << decoder_frames_at_client_ << "]";
970 ScheduleDecodeBufferTaskIfNeeded();
971 if (resolution_change_pending)
972 StartResolutionChange();
975 void V4L2VideoDecodeAccelerator::Enqueue() {
976 DVLOG(3) << "Enqueue()";
977 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
978 DCHECK_NE(decoder_state_, kUninitialized);
979 TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
981 // Drain the pipe of completed decode buffers.
982 const int old_inputs_queued = input_buffer_queued_count_;
983 while (!input_ready_queue_.empty()) {
984 if (!EnqueueInputRecord())
985 return;
987 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
988 // We just started up a previously empty queue.
989 // Queue state changed; signal interrupt.
990 if (!device_->SetDevicePollInterrupt()) {
991 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
992 NOTIFY_ERROR(PLATFORM_FAILURE);
993 return;
995 // Start VIDIOC_STREAMON if we haven't yet.
996 if (!input_streamon_) {
997 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
998 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
999 input_streamon_ = true;
1003 // Enqueue all the outputs we can.
1004 const int old_outputs_queued = output_buffer_queued_count_;
1005 while (!free_output_buffers_.empty()) {
1006 if (!EnqueueOutputRecord())
1007 return;
1009 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
1010 // We just started up a previously empty queue.
1011 // Queue state changed; signal interrupt.
1012 if (!device_->SetDevicePollInterrupt()) {
1013 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
1014 NOTIFY_ERROR(PLATFORM_FAILURE);
1015 return;
1017 // Start VIDIOC_STREAMON if we haven't yet.
1018 if (!output_streamon_) {
1019 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1020 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
1021 output_streamon_ = true;
1026 bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() {
1027 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1028 DCHECK_NE(decoder_state_, kUninitialized);
1029 DVLOG(3) << "DequeueResolutionChangeEvent()";
1031 struct v4l2_event ev;
1032 memset(&ev, 0, sizeof(ev));
1034 while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1035 if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1036 DVLOG(3)
1037 << "DequeueResolutionChangeEvent(): got resolution change event.";
1038 return true;
1039 } else {
1040 LOG(ERROR) << "DequeueResolutionChangeEvent(): got an event (" << ev.type
1041 << ") we haven't subscribed to.";
1044 return false;
1047 void V4L2VideoDecodeAccelerator::Dequeue() {
1048 DVLOG(3) << "Dequeue()";
1049 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1050 DCHECK_NE(decoder_state_, kUninitialized);
1051 TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1053 // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
1054 // list.
1055 while (input_buffer_queued_count_ > 0) {
1056 DCHECK(input_streamon_);
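    // Input (VIDEO_OUTPUT) buffers are single-plane, so a one-element plane
    // array is enough for the dequeue.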
1057 struct v4l2_buffer dqbuf;
1058 struct v4l2_plane planes[1];
1059 memset(&dqbuf, 0, sizeof(dqbuf));
1060 memset(planes, 0, sizeof(planes));
1061 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1062 dqbuf.memory = V4L2_MEMORY_MMAP;
1063 dqbuf.m.planes = planes;
1064 dqbuf.length = 1;
1065 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1066 if (errno == EAGAIN) {
1067 // EAGAIN if we're just out of buffers to dequeue.
1068 break;
1070 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1071 NOTIFY_ERROR(PLATFORM_FAILURE);
1072 return;
1074 InputRecord& input_record = input_buffer_map_[dqbuf.index];
1075 DCHECK(input_record.at_device);
1076 free_input_buffers_.push_back(dqbuf.index);
1077 input_record.at_device = false;
1078 input_record.bytes_used = 0;
1079 input_record.input_id = -1;
1080 input_buffer_queued_count_--;
1083 // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
1084 // completed queue.
1085 while (output_buffer_queued_count_ > 0) {
1086 DCHECK(output_streamon_);
1087 struct v4l2_buffer dqbuf;
1088 scoped_ptr<struct v4l2_plane[]> planes(
1089 new v4l2_plane[output_planes_count_]);
1090 memset(&dqbuf, 0, sizeof(dqbuf));
1091 memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1092 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1093 dqbuf.memory = V4L2_MEMORY_MMAP;
1094 dqbuf.m.planes = planes.get();
1095 dqbuf.length = output_planes_count_;
1096 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1097 if (errno == EAGAIN) {
1098 // EAGAIN if we're just out of buffers to dequeue.
1099 break;
1101 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1102 NOTIFY_ERROR(PLATFORM_FAILURE);
1103 return;
1105 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1106 DCHECK(output_record.at_device);
1107 DCHECK(!output_record.at_client);
1108 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1109 DCHECK_NE(output_record.picture_id, -1);
1110 output_record.at_device = false;
1111 if (dqbuf.m.planes[0].bytesused == 0) {
1112 // This is an empty output buffer returned as part of a flush.
1113 free_output_buffers_.push(dqbuf.index);
1114 } else {
1115 DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
1116 output_record.at_client = true;
1117 DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
1118 << " as picture_id=" << output_record.picture_id;
1119 const media::Picture& picture =
1120 media::Picture(output_record.picture_id, dqbuf.timestamp.tv_sec,
1121 gfx::Rect(visible_size_), false);
1122 pending_picture_ready_.push(
1123 PictureRecord(output_record.cleared, picture));
1124 SendPictureReady();
1125 output_record.cleared = true;
1126 decoder_frames_at_client_++;
1128 output_buffer_queued_count_--;
1131 NotifyFlushDoneIfNeeded();
1134 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1135 DVLOG(3) << "EnqueueInputRecord()";
1136 DCHECK(!input_ready_queue_.empty());
1138 // Enqueue an input (VIDEO_OUTPUT) buffer.
1139 const int buffer = input_ready_queue_.front();
1140 InputRecord& input_record = input_buffer_map_[buffer];
1141 DCHECK(!input_record.at_device);
1142 struct v4l2_buffer qbuf;
1143 struct v4l2_plane qbuf_plane;
1144 memset(&qbuf, 0, sizeof(qbuf));
1145 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1146 qbuf.index = buffer;
1147 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
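  // The bitstream input_id rides in the v4l2 timestamp field so that Dequeue()
  // can map decoded pictures back to their originating bitstream buffers.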
1148 qbuf.timestamp.tv_sec = input_record.input_id;
1149 qbuf.memory = V4L2_MEMORY_MMAP;
1150 qbuf.m.planes = &qbuf_plane;
1151 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1152 qbuf.length = 1;
1153 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1154 input_ready_queue_.pop();
1155 input_record.at_device = true;
1156 input_buffer_queued_count_++;
1157 DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
1158 << input_record.input_id << " size=" << input_record.bytes_used;
1159 return true;
1162 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1163 DVLOG(3) << "EnqueueOutputRecord()";
1164 DCHECK(!free_output_buffers_.empty());
1166 // Enqueue an output (VIDEO_CAPTURE) buffer.
1167 const int buffer = free_output_buffers_.front();
1168 OutputRecord& output_record = output_buffer_map_[buffer];
1169 DCHECK(!output_record.at_device);
1170 DCHECK(!output_record.at_client);
1171 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1172 DCHECK_NE(output_record.picture_id, -1);
1173 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1174 TRACE_EVENT0("Video Decoder",
1175 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1176 // If we have to wait for completion, wait. Note that
1177 // free_output_buffers_ is a FIFO queue, so we always wait on the
1178 // buffer that has been in the queue the longest.
1179 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1180 EGL_FOREVER_KHR) == EGL_FALSE) {
1181 // This will cause tearing, but is safe otherwise.
1182 DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
1184 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1185 LOG(ERROR) << __func__ << " eglDestroySyncKHR failed!";
1186 NOTIFY_ERROR(PLATFORM_FAILURE);
1187 return false;
1189 output_record.egl_sync = EGL_NO_SYNC_KHR;
1191 struct v4l2_buffer qbuf;
1192 scoped_ptr<struct v4l2_plane[]> qbuf_planes(
1193 new v4l2_plane[output_planes_count_]);
1194 memset(&qbuf, 0, sizeof(qbuf));
1195 memset(
1196 qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1197 qbuf.index = buffer;
1198 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1199 qbuf.memory = V4L2_MEMORY_MMAP;
1200 qbuf.m.planes = qbuf_planes.get();
1201 qbuf.length = output_planes_count_;
1202 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1203 free_output_buffers_.pop();
1204 output_record.at_device = true;
1205 output_buffer_queued_count_++;
1206 return true;
1209 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1210 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1211 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1212 << picture_buffer_id;
1213 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1214 TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1216 // We run ReusePictureBufferTask even if we're in kResetting.
1217 if (decoder_state_ == kError) {
1218 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1219 return;
1222 if (decoder_state_ == kChangingResolution) {
1223 DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1224 return;
1227 size_t index;
1228 for (index = 0; index < output_buffer_map_.size(); ++index)
1229 if (output_buffer_map_[index].picture_id == picture_buffer_id)
1230 break;
1232 if (index >= output_buffer_map_.size()) {
1233 // It's possible that we've already posted a DismissPictureBuffer for this
1234 // picture, but it has not yet executed when this ReusePictureBuffer was
1235 // posted to us by the client. In that case just ignore this (we've already
1236 // dismissed it and accounted for that) and let the sync object get
1237 // destroyed.
1238 DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
1239 << picture_buffer_id << " not in use (anymore?).";
1240 return;
1243 OutputRecord& output_record = output_buffer_map_[index];
1244 if (output_record.at_device || !output_record.at_client) {
1245 LOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1246 NOTIFY_ERROR(INVALID_ARGUMENT);
1247 return;
1250 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1251 DCHECK(!output_record.at_device);
1252 output_record.at_client = false;
1253 output_record.egl_sync = egl_sync_ref->egl_sync;
1254 free_output_buffers_.push(index);
1255 decoder_frames_at_client_--;
1256 // Take ownership of the EGLSync.
1257 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1258 // We got a buffer back, so enqueue it back.
1259 Enqueue();
1262 void V4L2VideoDecodeAccelerator::FlushTask() {
1263 DVLOG(3) << "FlushTask()";
1264 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1265 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1267 // Flush outstanding buffers.
1268 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1269 // There's nothing in the pipe, so return done immediately.
1270 DVLOG(3) << "FlushTask(): returning flush";
1271 child_task_runner_->PostTask(FROM_HERE,
1272 base::Bind(&Client::NotifyFlushDone, client_));
1273 return;
1274 } else if (decoder_state_ == kError) {
1275 DVLOG(2) << "FlushTask(): early out: kError state";
1276 return;
1279 // We don't support stacked flushing.
1280 DCHECK(!decoder_flushing_);
1282 // Queue up an empty buffer -- this triggers the flush.
1283 decoder_input_queue_.push(
1284 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1285 io_client_, io_task_runner_, NULL, 0, kFlushBufferId)));
1286 decoder_flushing_ = true;
1287 SendPictureReady(); // Send all pending PictureReady.
1289 ScheduleDecodeBufferTaskIfNeeded();
1292 void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1293 if (!decoder_flushing_)
1294 return;
1296 // Pipeline is empty when:
1297 // * Decoder input queue is empty of non-delayed buffers.
1298 // * There is no currently filling input buffer.
1299 // * Input holding queue is empty.
1300 // * All input (VIDEO_OUTPUT) buffers are returned.
1301 if (!decoder_input_queue_.empty()) {
1302 if (decoder_input_queue_.front()->input_id !=
1303 decoder_delay_bitstream_buffer_id_)
1304 return;
1306 if (decoder_current_input_buffer_ != -1)
1307 return;
1308 if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
1309 return;
1311 // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
1312 // sequence after flush to continue, even if we are not resetting. This would
1313 // make sense, because we don't really want to resume from a non-resume point
1314 // (e.g. not from an IDR) if we are flushed.
1315 // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1316 // could argue either way, or even say that Flush() is not needed/harmful when
1317 // transitioning to next chunk.
1318 // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1319 // when doing MSE. This should be harmless otherwise.
1320 if (!(StopDevicePoll() && StopOutputStream() && StopInputStream()))
1321 return;
1323 if (!StartDevicePoll())
1324 return;
1326 decoder_delay_bitstream_buffer_id_ = -1;
1327 decoder_flushing_ = false;
1328 DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1329 child_task_runner_->PostTask(FROM_HERE,
1330 base::Bind(&Client::NotifyFlushDone, client_));
1332 // While we were flushing, we early-outed DecodeBufferTask()s.
1333 ScheduleDecodeBufferTaskIfNeeded();
1336 void V4L2VideoDecodeAccelerator::ResetTask() {
1337 DVLOG(3) << "ResetTask()";
1338 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1339 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1341 if (decoder_state_ == kError) {
1342 DVLOG(2) << "ResetTask(): early out: kError state";
1343 return;
1346 // If we are in the middle of switching resolutions, postpone reset until
1347 // it's done. We don't have to worry about timing of this wrt to decoding,
1348 // because output pipe is already stopped if we are changing resolution.
1349 // We will come back here after we are done with the resolution change.
1350 DCHECK(!resolution_change_reset_pending_);
1351 if (decoder_state_ == kChangingResolution) {
1352 resolution_change_reset_pending_ = true;
1353 return;
1356 // After the output stream is stopped, the codec should not post any
1357 // resolution change events. So we dequeue the resolution change event
1358 // afterwards. The event could be posted before or while stopping the output
1359 // stream. The codec will expect the buffer of new size after the seek, so
1360 // we need to handle the resolution change event first.
1361 if (!(StopDevicePoll() && StopOutputStream()))
1362 return;
1364 if (DequeueResolutionChangeEvent()) {
1365 resolution_change_reset_pending_ = true;
1366 StartResolutionChange();
1367 return;
1370 if (!StopInputStream())
1371 return;
1373 decoder_current_bitstream_buffer_.reset();
1374 while (!decoder_input_queue_.empty())
1375 decoder_input_queue_.pop();
1377 decoder_current_input_buffer_ = -1;
1379 // If we were flushing, we'll never return any more BitstreamBuffers or
1380 // PictureBuffers; they have all been dropped and returned by now.
1381 NotifyFlushDoneIfNeeded();
1383 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1384 // jobs will early-out in the kResetting state.
1385 decoder_state_ = kResetting;
1386 SendPictureReady(); // Send all pending PictureReady.
1387 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1388 &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1391 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1392 DVLOG(3) << "ResetDoneTask()";
1393 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1394 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1396 if (decoder_state_ == kError) {
1397 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1398 return;
1401 if (!StartDevicePoll())
1402 return;
1404 // Reset format-specific bits.
1405 if (video_profile_ >= media::H264PROFILE_MIN &&
1406 video_profile_ <= media::H264PROFILE_MAX) {
1407 decoder_h264_parser_.reset(new media::H264Parser());
1410 // Jobs drained, we're finished resetting.
1411 DCHECK_EQ(decoder_state_, kResetting);
1412 if (output_buffer_map_.empty()) {
1413 // We must have gotten Reset() before we had a chance to request buffers
1414 // from the client.
1415 decoder_state_ = kInitialized;
1416 } else {
1417 decoder_state_ = kAfterReset;
1420 decoder_partial_frame_pending_ = false;
1421 decoder_delay_bitstream_buffer_id_ = -1;
1422 child_task_runner_->PostTask(FROM_HERE,
1423 base::Bind(&Client::NotifyResetDone, client_));
1425 // While we were resetting, we early-outed DecodeBufferTask()s.
1426 ScheduleDecodeBufferTaskIfNeeded();
1429 void V4L2VideoDecodeAccelerator::DestroyTask() {
1430 DVLOG(3) << "DestroyTask()";
1431 TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");
1433 // DestroyTask() should run regardless of decoder_state_.
1435 StopDevicePoll();
1436 StopOutputStream();
1437 StopInputStream();
1439 decoder_current_bitstream_buffer_.reset();
1440 decoder_current_input_buffer_ = -1;
1441 decoder_decode_buffer_tasks_scheduled_ = 0;
1442 decoder_frames_at_client_ = 0;
1443 while (!decoder_input_queue_.empty())
1444 decoder_input_queue_.pop();
1445 decoder_flushing_ = false;
1447 // Set our state to kError. Just in case.
1448 decoder_state_ = kError;
1451 bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
1452 DVLOG(3) << "StartDevicePoll()";
1453 DCHECK(!device_poll_thread_.IsRunning());
1454 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1456 // Start up the device poll thread and schedule its first DevicePollTask().
1457 if (!device_poll_thread_.Start()) {
1458 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1459 NOTIFY_ERROR(PLATFORM_FAILURE);
1460 return false;
1462 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1463 &V4L2VideoDecodeAccelerator::DevicePollTask,
1464 base::Unretained(this),
1465 0));
1467 return true;
1470 bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
1471 DVLOG(3) << "StopDevicePoll()";
1473 if (!device_poll_thread_.IsRunning())
1474 return true;
1476 if (decoder_thread_.IsRunning())
1477 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1479 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1480 if (!device_->SetDevicePollInterrupt()) {
1481 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
1482 NOTIFY_ERROR(PLATFORM_FAILURE);
1483 return false;
1485 device_poll_thread_.Stop();
1486 // Clear the interrupt now, to be sure.
1487 if (!device_->ClearDevicePollInterrupt()) {
1488 NOTIFY_ERROR(PLATFORM_FAILURE);
1489 return false;
1491 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1492 return true;
1495 bool V4L2VideoDecodeAccelerator::StopOutputStream() {
1496 DVLOG(3) << "StopOutputStream()";
1497 if (!output_streamon_)
1498 return true;
1500 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1501 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1502 output_streamon_ = false;
1504 // Reset accounting info for output.
1505 while (!free_output_buffers_.empty())
1506 free_output_buffers_.pop();
1508 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1509 OutputRecord& output_record = output_buffer_map_[i];
1510 DCHECK(!(output_record.at_client && output_record.at_device));
1512 // After streamoff, the device drops ownership of all buffers, even if
1513 // we don't dequeue them explicitly.
1514 output_buffer_map_[i].at_device = false;
1515 // Some of them may still be owned by the client however.
1516 // Reuse only those that aren't.
1517 if (!output_record.at_client) {
1518 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1519 free_output_buffers_.push(i);
1522 output_buffer_queued_count_ = 0;
1523 return true;
1526 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1527 DVLOG(3) << "StopInputStream()";
1528 if (!input_streamon_)
1529 return true;
1531 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1532 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1533 input_streamon_ = false;
1535 // Reset accounting info for input.
1536 while (!input_ready_queue_.empty())
1537 input_ready_queue_.pop();
1538 free_input_buffers_.clear();
1539 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1540 free_input_buffers_.push_back(i);
1541 input_buffer_map_[i].at_device = false;
1542 input_buffer_map_[i].bytes_used = 0;
1543 input_buffer_map_[i].input_id = -1;
1545 input_buffer_queued_count_ = 0;
1547 return true;
1550 void V4L2VideoDecodeAccelerator::StartResolutionChange() {
1551 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1552 DCHECK_NE(decoder_state_, kUninitialized);
1553 DCHECK_NE(decoder_state_, kResetting);
1555 DVLOG(3) << "Initiate resolution change";
1557 if (!(StopDevicePoll() && StopOutputStream()))
1558 return;
1560 decoder_state_ = kChangingResolution;
1562 // Post a task to clean up buffers on child thread. This will also ensure
1563 // that we won't accept ReusePictureBuffer() anymore after that.
1564 child_task_runner_->PostTask(
1565 FROM_HERE,
1566 base::Bind(&V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
1567 weak_this_));
1570 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1571 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1572 DCHECK_EQ(decoder_state_, kChangingResolution);
1573 DVLOG(3) << "FinishResolutionChange()";
1575 if (decoder_state_ == kError) {
1576 DVLOG(2) << "FinishResolutionChange(): early out: kError state";
1577 return;
1580 struct v4l2_format format;
1581 bool again;
1582 gfx::Size visible_size;
1583 bool ret = GetFormatInfo(&format, &visible_size, &again);
1584 if (!ret || again) {
1585 LOG(ERROR) << "Couldn't get format information after resolution change";
1586 NOTIFY_ERROR(PLATFORM_FAILURE);
1587 return;
1590 if (!CreateBuffersForFormat(format, visible_size)) {
1591 LOG(ERROR) << "Couldn't reallocate buffers after resolution change";
1592 NOTIFY_ERROR(PLATFORM_FAILURE);
1593 return;
1596 decoder_state_ = kDecoding;
1598 if (resolution_change_reset_pending_) {
1599 resolution_change_reset_pending_ = false;
1600 ResetTask();
1601 return;
1604 if (!StartDevicePoll())
1605 return;
1607 Enqueue();
1608 ScheduleDecodeBufferTaskIfNeeded();
1611 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1612 DVLOG(3) << "DevicePollTask()";
1613 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1614 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1616 bool event_pending = false;
1618 if (!device_->Poll(poll_device, &event_pending)) {
1619 NOTIFY_ERROR(PLATFORM_FAILURE);
1620 return;
1623 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1624 // touch decoder state from this thread.
1625 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1626 &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1627 base::Unretained(this), event_pending));
1630 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1631 DVLOG(2) << "NotifyError()";
1633 if (!child_task_runner_->BelongsToCurrentThread()) {
1634 child_task_runner_->PostTask(
1635 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
1636 weak_this_, error));
1637 return;
1638 }
1640 if (client_) {
1641 client_->NotifyError(error);
1642 client_ptr_factory_.reset();
1643 }
1644 }
1646 void V4L2VideoDecodeAccelerator::SetErrorState(Error error) {
1647 // We can touch decoder_state_ only if this is the decoder thread or the
1648 // decoder thread isn't running.
1649 if (decoder_thread_.message_loop() != NULL &&
1650 decoder_thread_.message_loop() != base::MessageLoop::current()) {
1651 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1652 &V4L2VideoDecodeAccelerator::SetErrorState,
1653 base::Unretained(this), error));
1654 return;
1655 }
1657 // Post NotifyError only if we are already initialized, as the API does
1658 // not allow doing so before that.
1659 if (decoder_state_ != kError && decoder_state_ != kUninitialized)
1660 NotifyError(error);
1662 decoder_state_ = kError;
1663 }
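// The coded resolution is not known until the driver has parsed enough of the
// stream. GetFormatInfo() asks for it with VIDIOC_G_FMT on the CAPTURE queue
// and reports "try again" via |again| while the headers have not been seen yet
// (the driver signals that with EINVAL).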
1665 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1666 gfx::Size* visible_size,
1667 bool* again) {
1668 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1670 *again = false;
1671 memset(format, 0, sizeof(*format));
1672 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1673 if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1674 if (errno == EINVAL) {
1675 // EINVAL means we haven't seen sufficient stream to decode the format.
1676 *again = true;
1677 return true;
1678 } else {
1679 PLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
1680 NOTIFY_ERROR(PLATFORM_FAILURE);
1681 return false;
1682 }
1683 }
1685 // Make sure we are still getting the format we set on initialization.
1686 if (format->fmt.pix_mp.pixelformat != output_format_fourcc_) {
1687 LOG(ERROR) << "Unexpected format from G_FMT on output";
1688 return false;
1689 }
1691 gfx::Size coded_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
1692 if (visible_size != nullptr)
1693 *visible_size = GetVisibleSize(coded_size);
1695 return true;
1696 }
1698 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1699 const struct v4l2_format& format,
1700 const gfx::Size& visible_size) {
1701 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1702 output_planes_count_ = format.fmt.pix_mp.num_planes;
1703 coded_size_.SetSize(format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1704 visible_size_ = visible_size;
1705 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
1706 << coded_size_.ToString() << ", visible size: "
1707 << visible_size_.ToString();
1709 return CreateOutputBuffers();
1710 }
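// The coded size is padded to the codec's macroblock alignment, so the visible
// (displayable) rectangle can be smaller -- e.g. 1080p content is typically
// coded as 1920x1088 with a 1920x1080 visible crop. VIDIOC_G_CROP on the
// CAPTURE queue reports that rectangle; if it looks unusable we fall back to
// the full coded size below.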
1712 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1713 const gfx::Size& coded_size) {
1714 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1716 struct v4l2_crop crop_arg;
1717 memset(&crop_arg, 0, sizeof(crop_arg));
1718 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1720 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
1721 PLOG(ERROR) << "GetVisibleSize(): ioctl() VIDIOC_G_CROP failed";
1722 return coded_size;
1723 }
1725 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width,
1726 crop_arg.c.height);
1727 DVLOG(3) << "visible rectangle is " << rect.ToString();
1728 if (!gfx::Rect(coded_size).Contains(rect)) {
1729 DLOG(ERROR) << "visible rectangle " << rect.ToString()
1730 << " is not inside coded size " << coded_size.ToString();
1731 return coded_size;
1732 }
1733 if (rect.IsEmpty()) {
1734 DLOG(ERROR) << "visible size is empty";
1735 return coded_size;
1736 }
1738 // Chrome assumes the picture frame is coded at (0, 0).
1739 if (!rect.origin().IsOrigin()) {
1740 DLOG(ERROR) << "Unexpected visible rectangle " << rect.ToString()
1741 << ", top-left is not origin";
1742 return coded_size;
1743 }
1745 return rect.size();
1746 }
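// Input buffers use V4L2_MEMORY_MMAP: the driver allocates them and we map them
// into our address space. The standard sequence, which the loop below follows,
// is roughly (sketch, error handling omitted):
//
//   VIDIOC_REQBUFS(count = kInputBufferCount, OUTPUT queue, V4L2_MEMORY_MMAP)
//   for each buffer i:
//     VIDIOC_QUERYBUF(i)  -> plane length and mem_offset
//     mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, mem_offset)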
1748 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1749 DVLOG(3) << "CreateInputBuffers()";
1750 // We always run this as we prepare to initialize.
1751 DCHECK_EQ(decoder_state_, kUninitialized);
1752 DCHECK(!input_streamon_);
1753 DCHECK(input_buffer_map_.empty());
1755 struct v4l2_requestbuffers reqbufs;
1756 memset(&reqbufs, 0, sizeof(reqbufs));
1757 reqbufs.count = kInputBufferCount;
1758 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1759 reqbufs.memory = V4L2_MEMORY_MMAP;
1760 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
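// The driver is allowed to adjust the requested count up or down, so size the
// bookkeeping from reqbufs.count rather than kInputBufferCount.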
1761 input_buffer_map_.resize(reqbufs.count);
1762 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1763 free_input_buffers_.push_back(i);
1765 // Query for the MEMORY_MMAP pointer.
1766 struct v4l2_plane planes[1];
1767 struct v4l2_buffer buffer;
1768 memset(&buffer, 0, sizeof(buffer));
1769 memset(planes, 0, sizeof(planes));
1770 buffer.index = i;
1771 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1772 buffer.memory = V4L2_MEMORY_MMAP;
1773 buffer.m.planes = planes;
1774 buffer.length = 1;
1775 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1776 void* address = device_->Mmap(NULL,
1777 buffer.m.planes[0].length,
1778 PROT_READ | PROT_WRITE,
1779 MAP_SHARED,
1780 buffer.m.planes[0].m.mem_offset);
1781 if (address == MAP_FAILED) {
1782 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
1783 return false;
1784 }
1785 input_buffer_map_[i].address = address;
1786 input_buffer_map_[i].length = buffer.m.planes[0].length;
1787 }
1789 return true;
1790 }
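// Format negotiation order matters: first commit the input (OUTPUT queue)
// format for the codec being decoded, then pick an output (CAPTURE queue)
// fourcc that the device advertises and that we can wrap in an EGLImage. Only
// the fourcc is set on the output side here; width/height arrive later from
// the driver once it has parsed the stream.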
1792 bool V4L2VideoDecodeAccelerator::SetupFormats() {
1793 // We always run this as we prepare to initialize.
1794 DCHECK_EQ(decoder_state_, kUninitialized);
1795 DCHECK(!input_streamon_);
1796 DCHECK(!output_streamon_);
1798 __u32 input_format_fourcc =
1799 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
1800 if (!input_format_fourcc) {
1801 NOTREACHED();
1802 return false;
1803 }
1805 size_t input_size;
1806 gfx::Size max_resolution, min_resolution;
1807 device_->GetSupportedResolution(input_format_fourcc, &min_resolution,
1808 &max_resolution);
1809 if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
1810 input_size = kInputBufferMaxSizeFor4k;
1811 else
1812 input_size = kInputBufferMaxSizeFor1080p;
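// Check that the device actually decodes this codec: VIDIOC_ENUM_FMT walks the
// supported OUTPUT-queue formats by index until it fails, and we look for our
// input fourcc among them before committing it with VIDIOC_S_FMT.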
1814 struct v4l2_fmtdesc fmtdesc;
1815 memset(&fmtdesc, 0, sizeof(fmtdesc));
1816 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1817 bool is_format_supported = false;
1818 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1819 if (fmtdesc.pixelformat == input_format_fourcc) {
1820 is_format_supported = true;
1821 break;
1822 }
1823 ++fmtdesc.index;
1824 }
1826 if (!is_format_supported) {
1827 DVLOG(1) << "Input fourcc " << input_format_fourcc
1828 << " not supported by device.";
1829 return false;
1830 }
1832 struct v4l2_format format;
1833 memset(&format, 0, sizeof(format));
1834 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1835 format.fmt.pix_mp.pixelformat = input_format_fourcc;
1836 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
1837 format.fmt.pix_mp.num_planes = 1;
1838 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1840 // We have to set up the format for output, because the driver may not allow
1841 // changing it once we start streaming; whether it can support our chosen
1842 // output format or not may depend on the input format.
1843 memset(&fmtdesc, 0, sizeof(fmtdesc));
1844 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1845 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1846 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
1847 output_format_fourcc_ = fmtdesc.pixelformat;
1848 break;
1849 }
1850 ++fmtdesc.index;
1851 }
1853 if (output_format_fourcc_ == 0) {
1854 LOG(ERROR) << "Could not find a usable output format";
1855 return false;
1856 }
1858 // Just set the fourcc for output; resolution, etc., will come from the
1859 // driver once it extracts it from the stream.
1860 memset(&format, 0, sizeof(format));
1861 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1862 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
1863 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1865 return true;
1866 }
1868 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1869 DVLOG(3) << "CreateOutputBuffers()";
1870 DCHECK(decoder_state_ == kInitialized ||
1871 decoder_state_ == kChangingResolution);
1872 DCHECK(!output_streamon_);
1873 DCHECK(output_buffer_map_.empty());
1875 // Number of output buffers we need.
1876 struct v4l2_control ctrl;
1877 memset(&ctrl, 0, sizeof(ctrl));
1878 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1879 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1880 output_dpb_size_ = ctrl.value;
1882 // Output format setup in Initialize().
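// V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (read above into output_dpb_size_) is the
// minimum number of CAPTURE buffers the decoder needs to hold its reference
// frames -- effectively the DPB size for the stream. kDpbOutputBufferExtraCount
// extra buffers are allocated on top of that so frames handed to the client for
// display do not starve the decoder.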
1884 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
1885 DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
1886 << "buffer_count=" << buffer_count
1887 << ", coded_size=" << coded_size_.ToString();
1888 child_task_runner_->PostTask(
1889 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
1890 buffer_count, coded_size_,
1891 device_->GetTextureTarget()));
1893 // Wait for the client to call AssignPictureBuffers() on the Child thread.
1894 // We do this because, if we continue decoding without finishing buffer
1895 // allocation, we may end up Resetting before AssignPictureBuffers arrives,
1896 // resulting in unnecessary complications and subtle bugs.
1897 // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
1898 // in a sequence, and Decode(Input1) results in us getting here and exiting
1899 // without waiting, we might end up running Reset{,Done}Task() before
1900 // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
1901 // to the free_output_buffers_ queue twice. If we somehow marked buffers as
1902 // not ready, we'd need special handling for restarting the second Decode
1903 // task and delaying it anyway.
1904 // Waiting here is not very costly and makes reasoning about different
1905 // situations much simpler.
1906 pictures_assigned_.Wait();
1908 Enqueue();
1909 return true;
1910 }
1912 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1913 DVLOG(3) << "DestroyInputBuffers()";
1914 DCHECK(child_task_runner_->BelongsToCurrentThread());
1915 DCHECK(!input_streamon_);
1917 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1918 if (input_buffer_map_[i].address != NULL) {
1919 device_->Munmap(input_buffer_map_[i].address,
1920 input_buffer_map_[i].length);
1921 }
1922 }
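// Releasing MMAP buffers is done by requesting zero of them: VIDIOC_REQBUFS
// with count == 0 frees all driver-side buffers on the queue.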
1924 struct v4l2_requestbuffers reqbufs;
1925 memset(&reqbufs, 0, sizeof(reqbufs));
1926 reqbufs.count = 0;
1927 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1928 reqbufs.memory = V4L2_MEMORY_MMAP;
1929 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1931 input_buffer_map_.clear();
1932 free_input_buffers_.clear();
1933 }
1935 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1936 DVLOG(3) << "DestroyOutputBuffers()";
1937 DCHECK(child_task_runner_->BelongsToCurrentThread());
1938 DCHECK(!output_streamon_);
1939 bool success = true;
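// Each output record may still own an EGLImage (bound to the client's texture)
// and possibly a pending EGLSync; both have to be destroyed before the
// PictureBuffer is dismissed and the V4L2 buffers are released below.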
1941 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1942 OutputRecord& output_record = output_buffer_map_[i];
1944 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1945 if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
1946 EGL_TRUE) {
1947 DVLOG(1) << __func__ << " DestroyEGLImage failed.";
1948 success = false;
1949 }
1950 }
1952 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1953 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1954 DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
1955 success = false;
1956 }
1957 }
1959 DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
1960 << output_record.picture_id;
1961 child_task_runner_->PostTask(
1962 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
1963 output_record.picture_id));
1964 }
1966 struct v4l2_requestbuffers reqbufs;
1967 memset(&reqbufs, 0, sizeof(reqbufs));
1968 reqbufs.count = 0;
1969 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1970 reqbufs.memory = V4L2_MEMORY_MMAP;
1971 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
1972 PLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
1973 success = false;
1974 }
1976 output_buffer_map_.clear();
1977 while (!free_output_buffers_.empty())
1978 free_output_buffers_.pop();
1980 return success;
1981 }
1983 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
1984 DCHECK(child_task_runner_->BelongsToCurrentThread());
1985 DVLOG(3) << "ResolutionChangeDestroyBuffers()";
1987 if (!DestroyOutputBuffers()) {
1988 LOG(ERROR) << __func__ << " Failed destroying output buffers.";
1989 NOTIFY_ERROR(PLATFORM_FAILURE);
1990 return;
1991 }
1993 // Finish resolution change on decoder thread.
1994 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1995 &V4L2VideoDecodeAccelerator::FinishResolutionChange,
1996 base::Unretained(this)));
1997 }
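// Pictures must reach the client in decode order. Cleared pictures can be
// posted straight to the IO thread, but a picture whose texture has not been
// cleared yet must round-trip through the child thread first; once one of
// those is in flight, later pictures are held back so ordering is preserved.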
1999 void V4L2VideoDecodeAccelerator::SendPictureReady() {
2000 DVLOG(3) << "SendPictureReady()";
2001 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2002 bool resetting_or_flushing =
2003 (decoder_state_ == kResetting || decoder_flushing_);
2004 while (pending_picture_ready_.size() > 0) {
2005 bool cleared = pending_picture_ready_.front().cleared;
2006 const media::Picture& picture = pending_picture_ready_.front().picture;
2007 if (cleared && picture_clearing_count_ == 0) {
2008 // This picture is already cleared. Post it to the IO thread to reduce
2009 // latency; this is the common case once the initial pictures have been cleared.
2010 io_task_runner_->PostTask(
2011 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
2012 pending_picture_ready_.pop();
2013 } else if (!cleared || resetting_or_flushing) {
2014 DVLOG(3) << "SendPictureReady()"
2015 << ". cleared=" << pending_picture_ready_.front().cleared
2016 << ", decoder_state_=" << decoder_state_
2017 << ", decoder_flushing_=" << decoder_flushing_
2018 << ", picture_clearing_count_=" << picture_clearing_count_;
2019 // If the picture is not cleared, post it to the child thread because it
2020 // has to be cleared in the child thread. A picture only needs to be
2021 // cleared once. If the decoder is resetting or flushing, send all pictures
2022 // now so that every PictureReady arrives before the reset or flush completes.
2023 child_task_runner_->PostTaskAndReply(
2024 FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
2025 // Unretained is safe. If Client::PictureReady gets to run, |this| is
2026 // alive. Destroy() waits for the decoder thread to finish.
2027 base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
2028 base::Unretained(this)));
2029 picture_clearing_count_++;
2030 pending_picture_ready_.pop();
2031 } else {
2032 // This picture is cleared. But some pictures are about to be cleared on
2033 // the child thread. To preserve the order, do not send this until those
2034 // pictures are cleared.
2035 break;
2036 }
2037 }
2038 }
2040 void V4L2VideoDecodeAccelerator::PictureCleared() {
2041 DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
2042 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2043 DCHECK_GT(picture_clearing_count_, 0);
2044 picture_clearing_count_--;
2045 SendPictureReady();
2046 }
2048 } // namespace content