content/common/gpu/media/v4l2_video_decode_accelerator.cc (chromium-blink-merge.git, blob 7076c47e7b5467b4da829b73a2c2d9e499b63bf7)
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
14 #include "base/bind.h"
15 #include "base/command_line.h"
16 #include "base/memory/shared_memory.h"
17 #include "base/message_loop/message_loop.h"
18 #include "base/numerics/safe_conversions.h"
19 #include "base/thread_task_runner_handle.h"
20 #include "base/trace_event/trace_event.h"
21 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
22 #include "media/base/media_switches.h"
23 #include "media/filters/h264_parser.h"
24 #include "ui/gfx/geometry/rect.h"
25 #include "ui/gl/scoped_binders.h"
27 #define NOTIFY_ERROR(x) \
28 do { \
29 LOG(ERROR) << "Setting error state:" << x; \
30 SetErrorState(x); \
31 } while (0)
33 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
34 do { \
35 if (device_->Ioctl(type, arg) != 0) { \
36 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
37 NOTIFY_ERROR(PLATFORM_FAILURE); \
38 return value; \
39 } \
40 } while (0)
42 #define IOCTL_OR_ERROR_RETURN(type, arg) \
43 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)
45 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)
48 #define IOCTL_OR_LOG_ERROR(type, arg) \
49 do { \
50 if (device_->Ioctl(type, arg) != 0) \
51 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52 } while (0)
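// Illustrative expansion of the helpers above (a sketch, not generated code):
// a call such as
//   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
// issues device_->Ioctl(VIDIOC_QUERYCAP, &caps); on failure it PLOGs the
// errno, invokes NOTIFY_ERROR(PLATFORM_FAILURE) (which logs and calls
// SetErrorState()), and returns false from the enclosing function.
// IOCTL_OR_LOG_ERROR only logs and lets execution continue.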
54 namespace content {
56 namespace {
58 // TODO(posciak): remove once we update linux-headers.
59 #ifndef V4L2_EVENT_RESOLUTION_CHANGE
60 #define V4L2_EVENT_RESOLUTION_CHANGE 5
61 #endif
63 } // anonymous namespace
65 struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
66 BitstreamBufferRef(
67 base::WeakPtr<Client>& client,
68 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
69 base::SharedMemory* shm,
70 size_t size,
71 int32 input_id);
72 ~BitstreamBufferRef();
73 const base::WeakPtr<Client> client;
74 const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
75 const scoped_ptr<base::SharedMemory> shm;
76 const size_t size;
77 size_t bytes_used;
78 const int32 input_id;
81 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
82 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
83 ~EGLSyncKHRRef();
84 EGLDisplay const egl_display;
85 EGLSyncKHR egl_sync;
88 struct V4L2VideoDecodeAccelerator::PictureRecord {
89 PictureRecord(bool cleared, const media::Picture& picture);
90 ~PictureRecord();
91 bool cleared; // Whether the texture is cleared and safe to render from.
92 media::Picture picture; // The decoded picture.
95 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
96 base::WeakPtr<Client>& client,
97 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
98 base::SharedMemory* shm,
99 size_t size,
100 int32 input_id)
101 : client(client),
102 client_task_runner(client_task_runner),
103 shm(shm),
104 size(size),
105 bytes_used(0),
106 input_id(input_id) {
109 V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
110 if (input_id >= 0) {
111 client_task_runner->PostTask(
112 FROM_HERE,
113 base::Bind(&Client::NotifyEndOfBitstreamBuffer, client, input_id));
117 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
118 EGLDisplay egl_display, EGLSyncKHR egl_sync)
119 : egl_display(egl_display),
120 egl_sync(egl_sync) {
123 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
124 // We don't check for eglDestroySyncKHR failures, because if we get here
125 // with a valid sync object, something went wrong and we are getting
126 // destroyed anyway.
127 if (egl_sync != EGL_NO_SYNC_KHR)
128 eglDestroySyncKHR(egl_display, egl_sync);
131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
132 : at_device(false),
133 address(NULL),
134 length(0),
135 bytes_used(0),
136 input_id(-1) {
139 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
142 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
143 : at_device(false),
144 at_client(false),
145 egl_image(EGL_NO_IMAGE_KHR),
146 egl_sync(EGL_NO_SYNC_KHR),
147 picture_id(-1),
148 cleared(false) {
151 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
153 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
154 bool cleared,
155 const media::Picture& picture)
156 : cleared(cleared), picture(picture) {}
158 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
160 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
161 EGLDisplay egl_display,
162 EGLContext egl_context,
163 const base::WeakPtr<Client>& io_client,
164 const base::Callback<bool(void)>& make_context_current,
165 const scoped_refptr<V4L2Device>& device,
166 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
167 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
168 io_task_runner_(io_task_runner),
169 io_client_(io_client),
170 decoder_thread_("V4L2DecoderThread"),
171 decoder_state_(kUninitialized),
172 device_(device),
173 decoder_delay_bitstream_buffer_id_(-1),
174 decoder_current_input_buffer_(-1),
175 decoder_decode_buffer_tasks_scheduled_(0),
176 decoder_frames_at_client_(0),
177 decoder_flushing_(false),
178 resolution_change_reset_pending_(false),
179 decoder_partial_frame_pending_(false),
180 input_streamon_(false),
181 input_buffer_queued_count_(0),
182 output_streamon_(false),
183 output_buffer_queued_count_(0),
184 output_dpb_size_(0),
185 output_planes_count_(0),
186 picture_clearing_count_(0),
187 pictures_assigned_(false, false),
188 device_poll_thread_("V4L2DevicePollThread"),
189 make_context_current_(make_context_current),
190 egl_display_(egl_display),
191 egl_context_(egl_context),
192 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
193 output_format_fourcc_(0),
194 weak_this_factory_(this) {
195 weak_this_ = weak_this_factory_.GetWeakPtr();
198 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
199 DCHECK(!decoder_thread_.IsRunning());
200 DCHECK(!device_poll_thread_.IsRunning());
202 DestroyInputBuffers();
203 DestroyOutputBuffers();
205 // These maps have members that should be manually destroyed, e.g. file
206 // descriptors, mmap() segments, etc.
207 DCHECK(input_buffer_map_.empty());
208 DCHECK(output_buffer_map_.empty());
211 bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
212 Client* client) {
213 DVLOG(3) << "Initialize()";
214 DCHECK(child_task_runner_->BelongsToCurrentThread());
215 DCHECK_EQ(decoder_state_, kUninitialized);
217 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
218 client_ = client_ptr_factory_->GetWeakPtr();
220 switch (profile) {
221 case media::H264PROFILE_BASELINE:
222 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
223 break;
224 case media::H264PROFILE_MAIN:
225 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
226 break;
227 case media::H264PROFILE_HIGH:
228 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
229 break;
230 case media::VP8PROFILE_ANY:
231 DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
232 break;
233 case media::VP9PROFILE_ANY:
234 DVLOG(2) << "Initialize(): profile VP9PROFILE_ANY";
235 break;
236 default:
237 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
238 return false;
240 video_profile_ = profile;
242 if (egl_display_ == EGL_NO_DISPLAY) {
243 LOG(ERROR) << "Initialize(): could not get EGLDisplay";
244 return false;
247 // We need the context to be initialized to query extensions.
248 if (!make_context_current_.Run()) {
249 LOG(ERROR) << "Initialize(): could not make context current";
250 return false;
253 // TODO(posciak): crbug.com/450898.
254 #if defined(ARCH_CPU_ARMEL)
255 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
256 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
257 return false;
259 #endif
261 // Capabilities check.
262 struct v4l2_capability caps;
263 const __u32 kCapsRequired =
264 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
265 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
266 V4L2_CAP_STREAMING;
267 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
268 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
269 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
270 ", caps check failed: 0x" << std::hex << caps.capabilities;
271 return false;
274 if (!SetupFormats())
275 return false;
277 // Subscribe to the resolution change event.
278 struct v4l2_event_subscription sub;
279 memset(&sub, 0, sizeof(sub));
280 sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
281 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);
283 if (video_profile_ >= media::H264PROFILE_MIN &&
284 video_profile_ <= media::H264PROFILE_MAX) {
285 decoder_h264_parser_.reset(new media::H264Parser());
288 if (!CreateInputBuffers())
289 return false;
291 if (!decoder_thread_.Start()) {
292 LOG(ERROR) << "Initialize(): decoder thread failed to start";
293 return false;
296 decoder_state_ = kInitialized;
298 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
299 decoder_thread_.message_loop()->PostTask(
300 FROM_HERE,
301 base::Bind(
302 base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
303 base::Unretained(this)));
305 return true;
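// A rough sketch of the expected client-side call sequence (hypothetical
// caller code, not part of this file; local variable names are assumed):
//
//   scoped_refptr<V4L2Device> device =
//       V4L2Device::Create(V4L2Device::kDecoder);
//   V4L2VideoDecodeAccelerator* vda = new V4L2VideoDecodeAccelerator(
//       egl_display, egl_context, io_client, make_context_current, device,
//       io_task_runner);
//   if (!vda->Initialize(media::H264PROFILE_MAIN, client))
//     return;                       // unsupported profile or device error
//   vda->Decode(bitstream_buffer);  // repeated; the client gets a
//                                   // NotifyEndOfBitstreamBuffer() per buffer
//   // The client supplies textures via AssignPictureBuffers() and returns
//   // them with ReusePictureBuffer(picture_buffer_id).
//   vda->Flush();    // or Reset(); both complete asynchronously
//   vda->Destroy();  // tears down threads and deletes the VDA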
308 void V4L2VideoDecodeAccelerator::Decode(
309 const media::BitstreamBuffer& bitstream_buffer) {
310 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
311 << ", size=" << bitstream_buffer.size();
312 DCHECK(io_task_runner_->BelongsToCurrentThread());
314 // DecodeTask() will take care of running a DecodeBufferTask().
315 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
316 &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
317 bitstream_buffer));
320 void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
321 const std::vector<media::PictureBuffer>& buffers) {
322 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
323 DCHECK(child_task_runner_->BelongsToCurrentThread());
325 if (buffers.size() != output_buffer_map_.size()) {
326 LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
327 " buffers. (Got " << buffers.size()
328 << ", requested " << output_buffer_map_.size() << ")";
329 NOTIFY_ERROR(INVALID_ARGUMENT);
330 return;
333 if (!make_context_current_.Run()) {
334 LOG(ERROR) << "AssignPictureBuffers(): could not make context current";
335 NOTIFY_ERROR(PLATFORM_FAILURE);
336 return;
339 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
341 // It's safe to manipulate all the buffer state here, because the decoder
342 // thread is waiting on pictures_assigned_.
343 DCHECK(free_output_buffers_.empty());
344 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
345 DCHECK(buffers[i].size() == coded_size_);
347 OutputRecord& output_record = output_buffer_map_[i];
348 DCHECK(!output_record.at_device);
349 DCHECK(!output_record.at_client);
350 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
351 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
352 DCHECK_EQ(output_record.picture_id, -1);
353 DCHECK_EQ(output_record.cleared, false);
355 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
356 egl_context_,
357 buffers[i].texture_id(),
358 coded_size_,
360 output_format_fourcc_,
361 output_planes_count_);
362 if (egl_image == EGL_NO_IMAGE_KHR) {
363 LOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
364 // Ownership of EGLImages allocated in previous iterations of this loop
365 // has been transferred to output_buffer_map_. After we error-out here
366 // the destructor will handle their cleanup.
367 NOTIFY_ERROR(PLATFORM_FAILURE);
368 return;
371 output_record.egl_image = egl_image;
372 output_record.picture_id = buffers[i].id();
373 free_output_buffers_.push(i);
374 DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
375 << "]: picture_id=" << output_record.picture_id;
378 pictures_assigned_.Signal();
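// AssignPictureBuffers() runs on the child thread while the decoder thread
// blocks on pictures_assigned_; Destroy() also signals the event so a
// teardown that races with this wait does not get stuck.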
381 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
382 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
383 // Must be run on child thread, as we'll insert a sync in the EGL context.
384 DCHECK(child_task_runner_->BelongsToCurrentThread());
386 if (!make_context_current_.Run()) {
387 LOG(ERROR) << "ReusePictureBuffer(): could not make context current";
388 NOTIFY_ERROR(PLATFORM_FAILURE);
389 return;
392 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
393 // TODO(posciak): crbug.com/450898.
394 #if defined(ARCH_CPU_ARMEL)
395 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
396 if (egl_sync == EGL_NO_SYNC_KHR) {
397 LOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
398 NOTIFY_ERROR(PLATFORM_FAILURE);
399 return;
401 #endif
403 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
404 egl_display_, egl_sync));
405 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
406 &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
407 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
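// Why the fence: the sync is inserted on the child (GL) thread so it orders
// after any GL commands that may still read this texture;
// EnqueueOutputRecord() later waits on it with eglClientWaitSyncKHR() and
// destroys it before the buffer is queued back to the device for decoding.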
410 void V4L2VideoDecodeAccelerator::Flush() {
411 DVLOG(3) << "Flush()";
412 DCHECK(child_task_runner_->BelongsToCurrentThread());
413 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
414 &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
417 void V4L2VideoDecodeAccelerator::Reset() {
418 DVLOG(3) << "Reset()";
419 DCHECK(child_task_runner_->BelongsToCurrentThread());
420 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
421 &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
424 void V4L2VideoDecodeAccelerator::Destroy() {
425 DVLOG(3) << "Destroy()";
426 DCHECK(child_task_runner_->BelongsToCurrentThread());
428 // We're destroying; cancel all callbacks.
429 client_ptr_factory_.reset();
430 weak_this_factory_.InvalidateWeakPtrs();
432 // If the decoder thread is running, destroy using posted task.
433 if (decoder_thread_.IsRunning()) {
434 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
435 &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
436 pictures_assigned_.Signal();
437 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
438 decoder_thread_.Stop();
439 } else {
440 // Otherwise, call the destroy task directly.
441 DestroyTask();
444 delete this;
447 bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
449 // static
450 media::VideoDecodeAccelerator::SupportedProfiles
451 V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
452 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
453 if (!device)
454 return SupportedProfiles();
456 const uint32_t supported_formats[] = {
457 V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9};
458 return device->GetSupportedDecodeProfiles(arraysize(supported_formats),
459 supported_formats);
462 void V4L2VideoDecodeAccelerator::DecodeTask(
463 const media::BitstreamBuffer& bitstream_buffer) {
464 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
465 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
466 DCHECK_NE(decoder_state_, kUninitialized);
467 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
468 bitstream_buffer.id());
470 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
471 io_client_, io_task_runner_,
472 new base::SharedMemory(bitstream_buffer.handle(), true),
473 bitstream_buffer.size(), bitstream_buffer.id()));
474 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
475 LOG(ERROR) << "Decode(): could not map bitstream_buffer";
476 NOTIFY_ERROR(UNREADABLE_INPUT);
477 return;
479 DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();
481 if (decoder_state_ == kResetting || decoder_flushing_) {
482 // In the case that we're resetting or flushing, we need to delay decoding
483 // the BitstreamBuffers that come after the Reset() or Flush() call. When
484 // we're here, we know that this DecodeTask() was scheduled by a Decode()
485 // call that came after (in the client thread) the Reset() or Flush() call;
486 // thus set up the delay if necessary.
487 if (decoder_delay_bitstream_buffer_id_ == -1)
488 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
489 } else if (decoder_state_ == kError) {
490 DVLOG(2) << "DecodeTask(): early out: kError state";
491 return;
494 decoder_input_queue_.push(
495 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
496 decoder_decode_buffer_tasks_scheduled_++;
497 DecodeBufferTask();
500 void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
501 DVLOG(3) << "DecodeBufferTask()";
502 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
503 DCHECK_NE(decoder_state_, kUninitialized);
504 TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
506 decoder_decode_buffer_tasks_scheduled_--;
508 if (decoder_state_ == kResetting) {
509 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
510 return;
511 } else if (decoder_state_ == kError) {
512 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
513 return;
514 } else if (decoder_state_ == kChangingResolution) {
515 DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
516 return;
519 if (decoder_current_bitstream_buffer_ == NULL) {
520 if (decoder_input_queue_.empty()) {
521 // We're waiting for a new buffer -- exit without scheduling a new task.
522 return;
524 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
525 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
526 // We're asked to delay decoding on this and subsequent buffers.
527 return;
530 // Setup to use the next buffer.
531 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
532 decoder_input_queue_.pop();
533 DVLOG(3) << "DecodeBufferTask(): reading input_id="
534 << decoder_current_bitstream_buffer_->input_id
535 << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
536 decoder_current_bitstream_buffer_->shm->memory() :
537 NULL)
538 << ", size=" << decoder_current_bitstream_buffer_->size;
540 bool schedule_task = false;
541 const size_t size = decoder_current_bitstream_buffer_->size;
542 size_t decoded_size = 0;
543 if (size == 0) {
544 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
545 if (input_id >= 0) {
546 // This is a buffer queued from the client that has zero size. Skip.
547 schedule_task = true;
548 } else {
549 // This is a buffer of zero size, queued to flush the pipe. Flush.
550 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
551 static_cast<base::SharedMemory*>(NULL));
 552        // Enqueue a buffer guaranteed to be empty. To do that, we flush the
 553        // current input, start a new input buffer with no data, then flush
            // that empty buffer down as well.
554 schedule_task = true;
555 if (decoder_current_input_buffer_ != -1 &&
556 input_buffer_map_[decoder_current_input_buffer_].input_id !=
557 kFlushBufferId)
558 schedule_task = FlushInputFrame();
560 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
561 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
562 decoder_partial_frame_pending_ = false;
563 schedule_task = true;
564 } else {
565 // If we failed to enqueue the empty buffer (due to pipeline
566 // backpressure), don't advance the bitstream buffer queue, and don't
567 // schedule the next task. This bitstream buffer queue entry will get
568 // reprocessed when the pipeline frees up.
569 schedule_task = false;
572 } else {
573 // This is a buffer queued from the client, with actual contents. Decode.
574 const uint8* const data =
575 reinterpret_cast<const uint8*>(
576 decoder_current_bitstream_buffer_->shm->memory()) +
577 decoder_current_bitstream_buffer_->bytes_used;
578 const size_t data_size =
579 decoder_current_bitstream_buffer_->size -
580 decoder_current_bitstream_buffer_->bytes_used;
581 if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
582 NOTIFY_ERROR(UNREADABLE_INPUT);
583 return;
585 // AdvanceFrameFragment should not return a size larger than the buffer
586 // size, even on invalid data.
587 CHECK_LE(decoded_size, data_size);
589 switch (decoder_state_) {
590 case kInitialized:
591 case kAfterReset:
592 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
593 break;
594 case kDecoding:
595 schedule_task = DecodeBufferContinue(data, decoded_size);
596 break;
597 default:
598 NOTIFY_ERROR(ILLEGAL_STATE);
599 return;
602 if (decoder_state_ == kError) {
603 // Failed during decode.
604 return;
607 if (schedule_task) {
608 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
609 if (decoder_current_bitstream_buffer_->bytes_used ==
610 decoder_current_bitstream_buffer_->size) {
611 // Our current bitstream buffer is done; return it.
612 int32 input_id = decoder_current_bitstream_buffer_->input_id;
613 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
614 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
615 decoder_current_bitstream_buffer_.reset();
617 ScheduleDecodeBufferTaskIfNeeded();
621 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
622 const uint8* data,
623 size_t size,
624 size_t* endpos) {
625 if (video_profile_ >= media::H264PROFILE_MIN &&
626 video_profile_ <= media::H264PROFILE_MAX) {
627 // For H264, we need to feed HW one frame at a time. This is going to take
628 // some parsing of our input stream.
629 decoder_h264_parser_->SetStream(data, size);
630 media::H264NALU nalu;
631 media::H264Parser::Result result;
632 *endpos = 0;
634 // Keep on peeking the next NALs while they don't indicate a frame
635 // boundary.
636 for (;;) {
637 bool end_of_frame = false;
638 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
639 if (result == media::H264Parser::kInvalidStream ||
640 result == media::H264Parser::kUnsupportedStream)
641 return false;
642 if (result == media::H264Parser::kEOStream) {
643 // We've reached the end of the buffer before finding a frame boundary.
644 decoder_partial_frame_pending_ = true;
645 return true;
647 switch (nalu.nal_unit_type) {
648 case media::H264NALU::kNonIDRSlice:
649 case media::H264NALU::kIDRSlice:
650 if (nalu.size < 1)
651 return false;
652 // For these two, if the "first_mb_in_slice" field is zero, start a
653 // new frame and return. This field is Exp-Golomb coded starting on
654 // the eighth data bit of the NAL; a zero value is encoded with a
655 // leading '1' bit in the byte, which we can detect as the byte being
656 // (unsigned) greater than or equal to 0x80.
657 if (nalu.data[1] >= 0x80) {
658 end_of_frame = true;
659 break;
661 break;
662 case media::H264NALU::kSEIMessage:
663 case media::H264NALU::kSPS:
664 case media::H264NALU::kPPS:
665 case media::H264NALU::kAUD:
666 case media::H264NALU::kEOSeq:
667 case media::H264NALU::kEOStream:
668 case media::H264NALU::kReserved14:
669 case media::H264NALU::kReserved15:
670 case media::H264NALU::kReserved16:
671 case media::H264NALU::kReserved17:
672 case media::H264NALU::kReserved18:
673 // These unconditionally signal a frame boundary.
674 end_of_frame = true;
675 break;
676 default:
677 // For all others, keep going.
678 break;
680 if (end_of_frame) {
681 if (!decoder_partial_frame_pending_ && *endpos == 0) {
682 // The frame was previously restarted, and we haven't filled the
683 // current frame with any contents yet. Start the new frame here and
684 // continue parsing NALs.
685 } else {
686 // The frame wasn't previously restarted and/or we have contents for
687 // the current frame; signal the start of a new frame here: we don't
688 // have a partial frame anymore.
689 decoder_partial_frame_pending_ = false;
690 return true;
693 *endpos = (nalu.data + nalu.size) - data;
695 NOTREACHED();
696 return false;
697 } else {
698 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
699 DCHECK_LE(video_profile_, media::VP9PROFILE_MAX);
700 // For VP8/9, we can just dump the entire buffer. No fragmentation needed,
701 // and we never return a partial frame.
702 *endpos = size;
703 decoder_partial_frame_pending_ = false;
704 return true;
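// Worked example for the slice-header check above (illustrative): for a
// kIDRSlice/kNonIDRSlice NAL, nalu.data[0] is the NAL header byte and the
// slice header begins at nalu.data[1] with first_mb_in_slice, coded as
// unsigned Exp-Golomb (ue(v)). The value 0 is encoded as the single bit '1',
// so a first payload byte of the form 1xxxxxxx, i.e. nalu.data[1] >= 0x80,
// means first_mb_in_slice == 0 and a new frame starts at this slice.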
708 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
709 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
711 // If we're behind on tasks, schedule another one.
712 int buffers_to_decode = decoder_input_queue_.size();
713 if (decoder_current_bitstream_buffer_ != NULL)
714 buffers_to_decode++;
715 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
716 decoder_decode_buffer_tasks_scheduled_++;
717 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
718 &V4L2VideoDecodeAccelerator::DecodeBufferTask,
719 base::Unretained(this)));
723 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
724 const void* data, size_t size, size_t* endpos) {
725 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
726 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
727 DCHECK_NE(decoder_state_, kUninitialized);
728 DCHECK_NE(decoder_state_, kDecoding);
729 // Initial decode. We haven't been able to get output stream format info yet.
730 // Get it, and start decoding.
732 // Copy in and send to HW.
733 if (!AppendToInputFrame(data, size))
734 return false;
736 // If we only have a partial frame, don't flush and process yet.
737 if (decoder_partial_frame_pending_)
738 return true;
740 if (!FlushInputFrame())
741 return false;
743 // Recycle buffers.
744 Dequeue();
746 // Check and see if we have format info yet.
747 struct v4l2_format format;
748 gfx::Size visible_size;
749 bool again = false;
750 if (!GetFormatInfo(&format, &visible_size, &again))
751 return false;
753 *endpos = size;
755 if (again) {
756 // Need more stream to decode format, return true and schedule next buffer.
757 return true;
760 // Run this initialization only on first startup.
761 if (decoder_state_ == kInitialized) {
762 DVLOG(3) << "DecodeBufferInitial(): running initialization";
763 // Success! Setup our parameters.
764 if (!CreateBuffersForFormat(format, visible_size))
765 return false;
768 decoder_state_ = kDecoding;
769 ScheduleDecodeBufferTaskIfNeeded();
770 return true;
773 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
774 const void* data, size_t size) {
775 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
776 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
777 DCHECK_EQ(decoder_state_, kDecoding);
779 // Both of these calls will set kError state if they fail.
780 // Only flush the frame if it's complete.
781 return (AppendToInputFrame(data, size) &&
782 (decoder_partial_frame_pending_ || FlushInputFrame()));
785 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
786 const void* data, size_t size) {
787 DVLOG(3) << "AppendToInputFrame()";
788 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
789 DCHECK_NE(decoder_state_, kUninitialized);
790 DCHECK_NE(decoder_state_, kResetting);
791 DCHECK_NE(decoder_state_, kError);
792 // This routine can handle data == NULL and size == 0, which occurs when
793 // we queue an empty buffer for the purposes of flushing the pipe.
 795   // Flush the current input buffer if this chunk would not fit in it.
796 if (decoder_current_input_buffer_ != -1) {
797 InputRecord& input_record =
798 input_buffer_map_[decoder_current_input_buffer_];
799 if (input_record.bytes_used + size > input_record.length) {
800 if (!FlushInputFrame())
801 return false;
802 decoder_current_input_buffer_ = -1;
806 // Try to get an available input buffer
807 if (decoder_current_input_buffer_ == -1) {
808 if (free_input_buffers_.empty()) {
809 // See if we can get more free buffers from HW
810 Dequeue();
811 if (free_input_buffers_.empty()) {
812 // Nope!
813 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
814 return false;
817 decoder_current_input_buffer_ = free_input_buffers_.back();
818 free_input_buffers_.pop_back();
819 InputRecord& input_record =
820 input_buffer_map_[decoder_current_input_buffer_];
821 DCHECK_EQ(input_record.bytes_used, 0);
822 DCHECK_EQ(input_record.input_id, -1);
823 DCHECK(decoder_current_bitstream_buffer_ != NULL);
824 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
827 DCHECK(data != NULL || size == 0);
828 if (size == 0) {
829 // If we asked for an empty buffer, return now. We return only after
830 // getting the next input buffer, since we might actually want an empty
831 // input buffer for flushing purposes.
832 return true;
835 // Copy in to the buffer.
836 InputRecord& input_record =
837 input_buffer_map_[decoder_current_input_buffer_];
838 if (size > input_record.length - input_record.bytes_used) {
839 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
840 NOTIFY_ERROR(UNREADABLE_INPUT);
841 return false;
843 memcpy(
844 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
845 data,
846 size);
847 input_record.bytes_used += size;
849 return true;
852 bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
853 DVLOG(3) << "FlushInputFrame()";
854 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
855 DCHECK_NE(decoder_state_, kUninitialized);
856 DCHECK_NE(decoder_state_, kResetting);
857 DCHECK_NE(decoder_state_, kError);
859 if (decoder_current_input_buffer_ == -1)
860 return true;
862 InputRecord& input_record =
863 input_buffer_map_[decoder_current_input_buffer_];
864 DCHECK_NE(input_record.input_id, -1);
865 DCHECK(input_record.input_id != kFlushBufferId ||
866 input_record.bytes_used == 0);
867 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
868 // got from the client. We can skip it if it is empty.
869 // * if input_id < 0 (should be kFlushBufferId in this case), this input
870 // buffer was prompted by a flush buffer, and should be queued even when
871 // empty.
872 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
873 input_record.input_id = -1;
874 free_input_buffers_.push_back(decoder_current_input_buffer_);
875 decoder_current_input_buffer_ = -1;
876 return true;
879 // Queue it.
880 input_ready_queue_.push(decoder_current_input_buffer_);
881 decoder_current_input_buffer_ = -1;
882 DVLOG(3) << "FlushInputFrame(): submitting input_id="
883 << input_record.input_id;
884 // Enqueue once since there's new available input for it.
885 Enqueue();
887 return (decoder_state_ != kError);
890 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
891 DVLOG(3) << "ServiceDeviceTask()";
892 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
893 DCHECK_NE(decoder_state_, kUninitialized);
894 TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
896 if (decoder_state_ == kResetting) {
897 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
898 return;
899 } else if (decoder_state_ == kError) {
900 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
901 return;
902 } else if (decoder_state_ == kChangingResolution) {
903 DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
904 return;
907 bool resolution_change_pending = false;
908 if (event_pending)
909 resolution_change_pending = DequeueResolutionChangeEvent();
910 Dequeue();
911 Enqueue();
913 // Clear the interrupt fd.
914 if (!device_->ClearDevicePollInterrupt()) {
915 NOTIFY_ERROR(PLATFORM_FAILURE);
916 return;
919 bool poll_device = false;
920 // Add fd, if we should poll on it.
921 // Can be polled as soon as either input or output buffers are queued.
922 if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
923 poll_device = true;
925 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
926 // so either:
927 // * device_poll_thread_ is running normally
928 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
929 // shut it down, in which case we're either in kResetting or kError states
930 // respectively, and we should have early-outed already.
931 DCHECK(device_poll_thread_.message_loop());
932 // Queue the DevicePollTask() now.
933 device_poll_thread_.message_loop()->PostTask(
934 FROM_HERE,
935 base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
936 base::Unretained(this),
937 poll_device));
939 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
940 << decoder_input_queue_.size() << "->"
941 << input_ready_queue_.size() << "] => DEVICE["
942 << free_input_buffers_.size() << "+"
943 << input_buffer_queued_count_ << "/"
944 << input_buffer_map_.size() << "->"
945 << free_output_buffers_.size() << "+"
946 << output_buffer_queued_count_ << "/"
947 << output_buffer_map_.size() << "] => VDA["
948 << decoder_frames_at_client_ << "]";
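// Reading the counts above: DEC[bitstream buffers awaiting decode ->
// input buffers filled and ready to queue] => DEVICE[free + queued / total
// input buffers -> free + queued / total output buffers] => VDA[pictures
// currently held by the client].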
950 ScheduleDecodeBufferTaskIfNeeded();
951 if (resolution_change_pending)
952 StartResolutionChange();
955 void V4L2VideoDecodeAccelerator::Enqueue() {
956 DVLOG(3) << "Enqueue()";
957 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
958 DCHECK_NE(decoder_state_, kUninitialized);
959 TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
961 // Drain the pipe of completed decode buffers.
962 const int old_inputs_queued = input_buffer_queued_count_;
963 while (!input_ready_queue_.empty()) {
964 if (!EnqueueInputRecord())
965 return;
967 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
968 // We just started up a previously empty queue.
969 // Queue state changed; signal interrupt.
970 if (!device_->SetDevicePollInterrupt()) {
971 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
972 NOTIFY_ERROR(PLATFORM_FAILURE);
973 return;
975 // Start VIDIOC_STREAMON if we haven't yet.
976 if (!input_streamon_) {
977 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
978 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
979 input_streamon_ = true;
983 // Enqueue all the outputs we can.
984 const int old_outputs_queued = output_buffer_queued_count_;
985 while (!free_output_buffers_.empty()) {
986 if (!EnqueueOutputRecord())
987 return;
989 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
990 // We just started up a previously empty queue.
991 // Queue state changed; signal interrupt.
992 if (!device_->SetDevicePollInterrupt()) {
993 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
994 NOTIFY_ERROR(PLATFORM_FAILURE);
995 return;
997 // Start VIDIOC_STREAMON if we haven't yet.
998 if (!output_streamon_) {
999 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1000 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
1001 output_streamon_ = true;
1006 bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() {
1007 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1008 DCHECK_NE(decoder_state_, kUninitialized);
1009 DVLOG(3) << "DequeueResolutionChangeEvent()";
1011 struct v4l2_event ev;
1012 memset(&ev, 0, sizeof(ev));
1014 while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1015 if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1016 DVLOG(3)
1017 << "DequeueResolutionChangeEvent(): got resolution change event.";
1018 return true;
1019 } else {
1020 LOG(ERROR) << "DequeueResolutionChangeEvent(): got an event (" << ev.type
1021 << ") we haven't subscribed to.";
1024 return false;
1027 void V4L2VideoDecodeAccelerator::Dequeue() {
1028 DVLOG(3) << "Dequeue()";
1029 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1030 DCHECK_NE(decoder_state_, kUninitialized);
1031 TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1033 // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
1034 // list.
1035 while (input_buffer_queued_count_ > 0) {
1036 DCHECK(input_streamon_);
1037 struct v4l2_buffer dqbuf;
1038 struct v4l2_plane planes[1];
1039 memset(&dqbuf, 0, sizeof(dqbuf));
1040 memset(planes, 0, sizeof(planes));
1041 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1042 dqbuf.memory = V4L2_MEMORY_MMAP;
1043 dqbuf.m.planes = planes;
1044 dqbuf.length = 1;
1045 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1046 if (errno == EAGAIN) {
1047 // EAGAIN if we're just out of buffers to dequeue.
1048 break;
1050 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1051 NOTIFY_ERROR(PLATFORM_FAILURE);
1052 return;
1054 InputRecord& input_record = input_buffer_map_[dqbuf.index];
1055 DCHECK(input_record.at_device);
1056 free_input_buffers_.push_back(dqbuf.index);
1057 input_record.at_device = false;
1058 input_record.bytes_used = 0;
1059 input_record.input_id = -1;
1060 input_buffer_queued_count_--;
1063 // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
1064 // completed queue.
1065 while (output_buffer_queued_count_ > 0) {
1066 DCHECK(output_streamon_);
1067 struct v4l2_buffer dqbuf;
1068 scoped_ptr<struct v4l2_plane[]> planes(
1069 new v4l2_plane[output_planes_count_]);
1070 memset(&dqbuf, 0, sizeof(dqbuf));
1071 memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1072 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1073 dqbuf.memory = V4L2_MEMORY_MMAP;
1074 dqbuf.m.planes = planes.get();
1075 dqbuf.length = output_planes_count_;
1076 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1077 if (errno == EAGAIN) {
1078 // EAGAIN if we're just out of buffers to dequeue.
1079 break;
1081 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1082 NOTIFY_ERROR(PLATFORM_FAILURE);
1083 return;
1085 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1086 DCHECK(output_record.at_device);
1087 DCHECK(!output_record.at_client);
1088 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1089 DCHECK_NE(output_record.picture_id, -1);
1090 output_record.at_device = false;
1091 if (dqbuf.m.planes[0].bytesused == 0) {
1092 // This is an empty output buffer returned as part of a flush.
1093 free_output_buffers_.push(dqbuf.index);
1094 } else {
1095 DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
1096 output_record.at_client = true;
1097 DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
1098 << " as picture_id=" << output_record.picture_id;
1099 const media::Picture& picture =
1100 media::Picture(output_record.picture_id, dqbuf.timestamp.tv_sec,
1101 gfx::Rect(visible_size_), false);
1102 pending_picture_ready_.push(
1103 PictureRecord(output_record.cleared, picture));
1104 SendPictureReady();
1105 output_record.cleared = true;
1106 decoder_frames_at_client_++;
1108 output_buffer_queued_count_--;
1111 NotifyFlushDoneIfNeeded();
1114 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1115 DVLOG(3) << "EnqueueInputRecord()";
1116 DCHECK(!input_ready_queue_.empty());
1118 // Enqueue an input (VIDEO_OUTPUT) buffer.
1119 const int buffer = input_ready_queue_.front();
1120 InputRecord& input_record = input_buffer_map_[buffer];
1121 DCHECK(!input_record.at_device);
1122 struct v4l2_buffer qbuf;
1123 struct v4l2_plane qbuf_plane;
1124 memset(&qbuf, 0, sizeof(qbuf));
1125 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1126 qbuf.index = buffer;
1127 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
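// The bitstream buffer id rides along in the v4l2 timestamp: Dequeue()
// reads dqbuf.timestamp.tv_sec back out to pair each decoded picture with
// the input_id that produced it.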
1128 qbuf.timestamp.tv_sec = input_record.input_id;
1129 qbuf.memory = V4L2_MEMORY_MMAP;
1130 qbuf.m.planes = &qbuf_plane;
1131 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1132 qbuf.length = 1;
1133 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1134 input_ready_queue_.pop();
1135 input_record.at_device = true;
1136 input_buffer_queued_count_++;
1137 DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
1138 << input_record.input_id << " size=" << input_record.bytes_used;
1139 return true;
1142 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1143 DVLOG(3) << "EnqueueOutputRecord()";
1144 DCHECK(!free_output_buffers_.empty());
1146 // Enqueue an output (VIDEO_CAPTURE) buffer.
1147 const int buffer = free_output_buffers_.front();
1148 OutputRecord& output_record = output_buffer_map_[buffer];
1149 DCHECK(!output_record.at_device);
1150 DCHECK(!output_record.at_client);
1151 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1152 DCHECK_NE(output_record.picture_id, -1);
1153 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1154 TRACE_EVENT0("Video Decoder",
1155 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1156 // If we have to wait for completion, wait. Note that
1157 // free_output_buffers_ is a FIFO queue, so we always wait on the
1158 // buffer that has been in the queue the longest.
1159 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1160 EGL_FOREVER_KHR) == EGL_FALSE) {
1161 // This will cause tearing, but is safe otherwise.
1162 DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
1164 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1165 LOG(ERROR) << __func__ << " eglDestroySyncKHR failed!";
1166 NOTIFY_ERROR(PLATFORM_FAILURE);
1167 return false;
1169 output_record.egl_sync = EGL_NO_SYNC_KHR;
1171 struct v4l2_buffer qbuf;
1172 scoped_ptr<struct v4l2_plane[]> qbuf_planes(
1173 new v4l2_plane[output_planes_count_]);
1174 memset(&qbuf, 0, sizeof(qbuf));
1175 memset(
1176 qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1177 qbuf.index = buffer;
1178 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1179 qbuf.memory = V4L2_MEMORY_MMAP;
1180 qbuf.m.planes = qbuf_planes.get();
1181 qbuf.length = output_planes_count_;
1182 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1183 free_output_buffers_.pop();
1184 output_record.at_device = true;
1185 output_buffer_queued_count_++;
1186 return true;
1189 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1190 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1191 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1192 << picture_buffer_id;
1193 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1194 TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1196 // We run ReusePictureBufferTask even if we're in kResetting.
1197 if (decoder_state_ == kError) {
1198 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1199 return;
1202 if (decoder_state_ == kChangingResolution) {
1203 DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1204 return;
1207 size_t index;
1208 for (index = 0; index < output_buffer_map_.size(); ++index)
1209 if (output_buffer_map_[index].picture_id == picture_buffer_id)
1210 break;
1212 if (index >= output_buffer_map_.size()) {
1213 // It's possible that we've already posted a DismissPictureBuffer for this
1214 // picture, but it has not yet executed when this ReusePictureBuffer was
1215 // posted to us by the client. In that case just ignore this (we've already
1216 // dismissed it and accounted for that) and let the sync object get
1217 // destroyed.
1218 DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
1219 << picture_buffer_id << " not in use (anymore?).";
1220 return;
1223 OutputRecord& output_record = output_buffer_map_[index];
1224 if (output_record.at_device || !output_record.at_client) {
1225 LOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1226 NOTIFY_ERROR(INVALID_ARGUMENT);
1227 return;
1230 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1231 DCHECK(!output_record.at_device);
1232 output_record.at_client = false;
1233 output_record.egl_sync = egl_sync_ref->egl_sync;
1234 free_output_buffers_.push(index);
1235 decoder_frames_at_client_--;
1236 // Take ownership of the EGLSync.
1237 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1238 // We got a buffer back, so enqueue it back.
1239 Enqueue();
1242 void V4L2VideoDecodeAccelerator::FlushTask() {
1243 DVLOG(3) << "FlushTask()";
1244 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1245 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1247 // Flush outstanding buffers.
1248 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1249 // There's nothing in the pipe, so return done immediately.
1250 DVLOG(3) << "FlushTask(): returning flush";
1251 child_task_runner_->PostTask(FROM_HERE,
1252 base::Bind(&Client::NotifyFlushDone, client_));
1253 return;
1254 } else if (decoder_state_ == kError) {
1255 DVLOG(2) << "FlushTask(): early out: kError state";
1256 return;
1259 // We don't support stacked flushing.
1260 DCHECK(!decoder_flushing_);
1262 // Queue up an empty buffer -- this triggers the flush.
1263 decoder_input_queue_.push(
1264 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1265 io_client_, io_task_runner_, NULL, 0, kFlushBufferId)));
1266 decoder_flushing_ = true;
1267 SendPictureReady(); // Send all pending PictureReady.
1269 ScheduleDecodeBufferTaskIfNeeded();
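// Flush is implemented as data: the zero-size kFlushBufferId buffer queued
// above travels through the same queues as normal input (see the size == 0
// path in DecodeBufferTask()), and NotifyFlushDoneIfNeeded() below reports
// NotifyFlushDone() once everything queued ahead of it has drained.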
1272 void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1273 if (!decoder_flushing_)
1274 return;
1276 // Pipeline is empty when:
1277 // * Decoder input queue is empty of non-delayed buffers.
1278 // * There is no currently filling input buffer.
1279 // * Input holding queue is empty.
1280 // * All input (VIDEO_OUTPUT) buffers are returned.
1281 if (!decoder_input_queue_.empty()) {
1282 if (decoder_input_queue_.front()->input_id !=
1283 decoder_delay_bitstream_buffer_id_)
1284 return;
1286 if (decoder_current_input_buffer_ != -1)
1287 return;
1288 if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
1289 return;
1291 // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
1292 // sequence after flush to continue, even if we are not resetting. This would
1293 // make sense, because we don't really want to resume from a non-resume point
1294 // (e.g. not from an IDR) if we are flushed.
1295 // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1296 // could argue either way, or even say that Flush() is not needed/harmful when
1297 // transitioning to next chunk.
1298 // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1299 // when doing MSE. This should be harmless otherwise.
1300 if (!(StopDevicePoll() && StopOutputStream() && StopInputStream()))
1301 return;
1303 if (!StartDevicePoll())
1304 return;
1306 decoder_delay_bitstream_buffer_id_ = -1;
1307 decoder_flushing_ = false;
1308 DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1309 child_task_runner_->PostTask(FROM_HERE,
1310 base::Bind(&Client::NotifyFlushDone, client_));
1312 // While we were flushing, we early-outed DecodeBufferTask()s.
1313 ScheduleDecodeBufferTaskIfNeeded();
1316 void V4L2VideoDecodeAccelerator::ResetTask() {
1317 DVLOG(3) << "ResetTask()";
1318 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1319 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1321 if (decoder_state_ == kError) {
1322 DVLOG(2) << "ResetTask(): early out: kError state";
1323 return;
1326 // If we are in the middle of switching resolutions, postpone reset until
1327 // it's done. We don't have to worry about timing of this wrt to decoding,
1328 // because output pipe is already stopped if we are changing resolution.
1329 // We will come back here after we are done with the resolution change.
1330 DCHECK(!resolution_change_reset_pending_);
1331 if (decoder_state_ == kChangingResolution) {
1332 resolution_change_reset_pending_ = true;
1333 return;
1336 // After the output stream is stopped, the codec should not post any
1337 // resolution change events. So we dequeue the resolution change event
1338 // afterwards. The event could be posted before or while stopping the output
1339 // stream. The codec will expect the buffer of new size after the seek, so
1340 // we need to handle the resolution change event first.
1341 if (!(StopDevicePoll() && StopOutputStream()))
1342 return;
1344 if (DequeueResolutionChangeEvent()) {
1345 resolution_change_reset_pending_ = true;
1346 StartResolutionChange();
1347 return;
1350 if (!StopInputStream())
1351 return;
1353 decoder_current_bitstream_buffer_.reset();
1354 while (!decoder_input_queue_.empty())
1355 decoder_input_queue_.pop();
1357 decoder_current_input_buffer_ = -1;
1359 // If we were flushing, we'll never return any more BitstreamBuffers or
1360 // PictureBuffers; they have all been dropped and returned by now.
1361 NotifyFlushDoneIfNeeded();
1363 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1364 // jobs will early-out in the kResetting state.
1365 decoder_state_ = kResetting;
1366 SendPictureReady(); // Send all pending PictureReady.
1367 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1368 &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1371 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1372 DVLOG(3) << "ResetDoneTask()";
1373 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1374 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1376 if (decoder_state_ == kError) {
1377 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1378 return;
1381 if (!StartDevicePoll())
1382 return;
1384 // Reset format-specific bits.
1385 if (video_profile_ >= media::H264PROFILE_MIN &&
1386 video_profile_ <= media::H264PROFILE_MAX) {
1387 decoder_h264_parser_.reset(new media::H264Parser());
1390 // Jobs drained, we're finished resetting.
1391 DCHECK_EQ(decoder_state_, kResetting);
1392 if (output_buffer_map_.empty()) {
1393 // We must have gotten Reset() before we had a chance to request buffers
1394 // from the client.
1395 decoder_state_ = kInitialized;
1396 } else {
1397 decoder_state_ = kAfterReset;
1400 decoder_partial_frame_pending_ = false;
1401 decoder_delay_bitstream_buffer_id_ = -1;
1402 child_task_runner_->PostTask(FROM_HERE,
1403 base::Bind(&Client::NotifyResetDone, client_));
1405 // While we were resetting, we early-outed DecodeBufferTask()s.
1406 ScheduleDecodeBufferTaskIfNeeded();
1409 void V4L2VideoDecodeAccelerator::DestroyTask() {
1410 DVLOG(3) << "DestroyTask()";
1411 TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");
1413 // DestroyTask() should run regardless of decoder_state_.
1415 StopDevicePoll();
1416 StopOutputStream();
1417 StopInputStream();
1419 decoder_current_bitstream_buffer_.reset();
1420 decoder_current_input_buffer_ = -1;
1421 decoder_decode_buffer_tasks_scheduled_ = 0;
1422 decoder_frames_at_client_ = 0;
1423 while (!decoder_input_queue_.empty())
1424 decoder_input_queue_.pop();
1425 decoder_flushing_ = false;
1427 // Set our state to kError. Just in case.
1428 decoder_state_ = kError;
1431 bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
1432 DVLOG(3) << "StartDevicePoll()";
1433 DCHECK(!device_poll_thread_.IsRunning());
1434 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1436 // Start up the device poll thread and schedule its first DevicePollTask().
1437 if (!device_poll_thread_.Start()) {
1438 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1439 NOTIFY_ERROR(PLATFORM_FAILURE);
1440 return false;
1442 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1443 &V4L2VideoDecodeAccelerator::DevicePollTask,
1444 base::Unretained(this),
1445 0));
1447 return true;
1450 bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
1451 DVLOG(3) << "StopDevicePoll()";
1453 if (!device_poll_thread_.IsRunning())
1454 return true;
1456 if (decoder_thread_.IsRunning())
1457 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1459 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1460 if (!device_->SetDevicePollInterrupt()) {
1461 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
1462 NOTIFY_ERROR(PLATFORM_FAILURE);
1463 return false;
1465 device_poll_thread_.Stop();
1466 // Clear the interrupt now, to be sure.
1467 if (!device_->ClearDevicePollInterrupt()) {
1468 NOTIFY_ERROR(PLATFORM_FAILURE);
1469 return false;
1471 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1472 return true;
1475 bool V4L2VideoDecodeAccelerator::StopOutputStream() {
1476 DVLOG(3) << "StopOutputStream()";
1477 if (!output_streamon_)
1478 return true;
1480 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1481 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1482 output_streamon_ = false;
1484 // Reset accounting info for output.
1485 while (!free_output_buffers_.empty())
1486 free_output_buffers_.pop();
1488 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1489 OutputRecord& output_record = output_buffer_map_[i];
1490 DCHECK(!(output_record.at_client && output_record.at_device));
1492 // After streamoff, the device drops ownership of all buffers, even if
1493 // we don't dequeue them explicitly.
1494 output_buffer_map_[i].at_device = false;
1495 // Some of them may still be owned by the client however.
1496 // Reuse only those that aren't.
1497 if (!output_record.at_client) {
1498 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1499 free_output_buffers_.push(i);
1502 output_buffer_queued_count_ = 0;
1503 return true;
1506 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1507 DVLOG(3) << "StopInputStream()";
1508 if (!input_streamon_)
1509 return true;
1511 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1512 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1513 input_streamon_ = false;
1515 // Reset accounting info for input.
1516 while (!input_ready_queue_.empty())
1517 input_ready_queue_.pop();
1518 free_input_buffers_.clear();
1519 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1520 free_input_buffers_.push_back(i);
1521 input_buffer_map_[i].at_device = false;
1522 input_buffer_map_[i].bytes_used = 0;
1523 input_buffer_map_[i].input_id = -1;
1525 input_buffer_queued_count_ = 0;
1527 return true;
1530 void V4L2VideoDecodeAccelerator::StartResolutionChange() {
1531 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1532 DCHECK_NE(decoder_state_, kUninitialized);
1533 DCHECK_NE(decoder_state_, kResetting);
1535 DVLOG(3) << "Initiate resolution change";
1537 if (!(StopDevicePoll() && StopOutputStream()))
1538 return;
1540 decoder_state_ = kChangingResolution;
1542 // Post a task to clean up buffers on child thread. This will also ensure
1543 // that we won't accept ReusePictureBuffer() anymore after that.
1544 child_task_runner_->PostTask(
1545 FROM_HERE,
1546 base::Bind(&V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
1547 weak_this_));
1550 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1551 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1552 DCHECK_EQ(decoder_state_, kChangingResolution);
1553 DVLOG(3) << "FinishResolutionChange()";
1555 if (decoder_state_ == kError) {
1556 DVLOG(2) << "FinishResolutionChange(): early out: kError state";
1557 return;
1560 struct v4l2_format format;
1561 bool again;
1562 gfx::Size visible_size;
1563 bool ret = GetFormatInfo(&format, &visible_size, &again);
1564 if (!ret || again) {
1565 LOG(ERROR) << "Couldn't get format information after resolution change";
1566 NOTIFY_ERROR(PLATFORM_FAILURE);
1567 return;
1570 if (!CreateBuffersForFormat(format, visible_size)) {
1571 LOG(ERROR) << "Couldn't reallocate buffers after resolution change";
1572 NOTIFY_ERROR(PLATFORM_FAILURE);
1573 return;
1576 decoder_state_ = kDecoding;
1578 if (resolution_change_reset_pending_) {
1579 resolution_change_reset_pending_ = false;
1580 ResetTask();
1581 return;
1584 if (!StartDevicePoll())
1585 return;
1587 Enqueue();
1588 ScheduleDecodeBufferTaskIfNeeded();
1591 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1592 DVLOG(3) << "DevicePollTask()";
1593 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1594 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1596 bool event_pending = false;
1598 if (!device_->Poll(poll_device, &event_pending)) {
1599 NOTIFY_ERROR(PLATFORM_FAILURE);
1600 return;
1603 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1604 // touch decoder state from this thread.
1605 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1606 &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1607 base::Unretained(this), event_pending));
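// DevicePollTask() and ServiceDeviceTask() ping-pong between threads:
// Poll() blocks on the device poll thread until the device or the interrupt
// fd wakes it, then ServiceDeviceTask() performs all queue manipulation on
// the decoder thread and re-posts the next DevicePollTask().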
1610 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1611 DVLOG(2) << "NotifyError()";
1613 if (!child_task_runner_->BelongsToCurrentThread()) {
1614 child_task_runner_->PostTask(
1615 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
1616 weak_this_, error));
1617 return;
1620 if (client_) {
1621 client_->NotifyError(error);
1622 client_ptr_factory_.reset();
1626 void V4L2VideoDecodeAccelerator::SetErrorState(Error error) {
1627 // We can touch decoder_state_ only if this is the decoder thread or the
1628 // decoder thread isn't running.
1629 if (decoder_thread_.message_loop() != NULL &&
1630 decoder_thread_.message_loop() != base::MessageLoop::current()) {
1631 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1632 &V4L2VideoDecodeAccelerator::SetErrorState,
1633 base::Unretained(this), error));
1634 return;
1635 }
1637 // Post NotifyError only if we are already initialized, as the API does
1638 // not allow doing so before that.
1639 if (decoder_state_ != kError && decoder_state_ != kUninitialized)
1640 NotifyError(error);
1642 decoder_state_ = kError;
1643 }
1645 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1646 gfx::Size* visible_size,
1647 bool* again) {
1648 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1650 *again = false;
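// The CAPTURE (decoded frame) format carries the coded resolution the driver
// parsed from the stream headers.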
1651 memset(format, 0, sizeof(*format));
1652 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1653 if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1654 if (errno == EINVAL) {
1655 // EINVAL means we haven't seen sufficient stream to decode the format.
1656 *again = true;
1657 return true;
1658 } else {
1659 PLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
1660 NOTIFY_ERROR(PLATFORM_FAILURE);
1661 return false;
1662 }
1663 }
1665 // Make sure we are still getting the format we set on initialization.
1666 if (format->fmt.pix_mp.pixelformat != output_format_fourcc_) {
1667 LOG(ERROR) << "Unexpected format from G_FMT on output";
1668 return false;
1669 }
1671 gfx::Size coded_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
1672 if (visible_size != nullptr)
1673 *visible_size = GetVisibleSize(coded_size);
1675 return true;
1676 }
1678 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1679 const struct v4l2_format& format,
1680 const gfx::Size& visible_size) {
1681 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1682 output_planes_count_ = format.fmt.pix_mp.num_planes;
1683 coded_size_.SetSize(format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1684 visible_size_ = visible_size;
1685 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
1686 << coded_size_.ToString() << ", visible size: "
1687 << visible_size_.ToString();
1689 return CreateOutputBuffers();
1690 }
1692 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1693 const gfx::Size& coded_size) {
1694 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
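// The visible rectangle is reported via VIDIOC_G_CROP on the CAPTURE queue;
// fall back to the coded size if the crop is unavailable or inconsistent.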
1696 struct v4l2_crop crop_arg;
1697 memset(&crop_arg, 0, sizeof(crop_arg));
1698 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1700 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
1701 PLOG(ERROR) << "GetVisibleSize(): ioctl() VIDIOC_G_CROP failed";
1702 return coded_size;
1703 }
1705 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width,
1706 crop_arg.c.height);
1707 DVLOG(3) << "visible rectangle is " << rect.ToString();
1708 if (!gfx::Rect(coded_size).Contains(rect)) {
1709 DLOG(ERROR) << "visible rectangle " << rect.ToString()
1710 << " is not inside coded size " << coded_size.ToString();
1711 return coded_size;
1712 }
1713 if (rect.IsEmpty()) {
1714 DLOG(ERROR) << "visible size is empty";
1715 return coded_size;
1716 }
1718 // Chrome assumes the picture frame is coded at (0, 0).
1719 if (!rect.origin().IsOrigin()) {
1720 DLOG(ERROR) << "Unexpected visible rectangle " << rect.ToString()
1721 << ", top-left is not origin";
1722 return coded_size;
1723 }
1725 return rect.size();
1726 }
1728 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1729 DVLOG(3) << "CreateInputBuffers()";
1730 // We always run this as we prepare to initialize.
1731 DCHECK_EQ(decoder_state_, kUninitialized);
1732 DCHECK(!input_streamon_);
1733 DCHECK(input_buffer_map_.empty());
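// Request kInputBufferCount MMAP buffers on the OUTPUT (bitstream) queue and
// mmap each one so that bitstream data can be copied into it.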
1735 struct v4l2_requestbuffers reqbufs;
1736 memset(&reqbufs, 0, sizeof(reqbufs));
1737 reqbufs.count = kInputBufferCount;
1738 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1739 reqbufs.memory = V4L2_MEMORY_MMAP;
1740 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1741 input_buffer_map_.resize(reqbufs.count);
1742 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1743 free_input_buffers_.push_back(i);
1745 // Query for the MEMORY_MMAP pointer.
1746 struct v4l2_plane planes[1];
1747 struct v4l2_buffer buffer;
1748 memset(&buffer, 0, sizeof(buffer));
1749 memset(planes, 0, sizeof(planes));
1750 buffer.index = i;
1751 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1752 buffer.memory = V4L2_MEMORY_MMAP;
1753 buffer.m.planes = planes;
1754 buffer.length = 1;
1755 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1756 void* address = device_->Mmap(NULL,
1757 buffer.m.planes[0].length,
1758 PROT_READ | PROT_WRITE,
1759 MAP_SHARED,
1760 buffer.m.planes[0].m.mem_offset);
1761 if (address == MAP_FAILED) {
1762 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
1763 return false;
1764 }
1765 input_buffer_map_[i].address = address;
1766 input_buffer_map_[i].length = buffer.m.planes[0].length;
1767 }
1769 return true;
1770 }
1772 bool V4L2VideoDecodeAccelerator::SetupFormats() {
1773 // We always run this as we prepare to initialize.
1774 DCHECK_EQ(decoder_state_, kUninitialized);
1775 DCHECK(!input_streamon_);
1776 DCHECK(!output_streamon_);
1778 __u32 input_format_fourcc =
1779 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
1780 if (!input_format_fourcc) {
1781 NOTREACHED();
1782 return false;
1783 }
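// Size the bitstream input buffers according to the largest resolution the
// device supports.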
1785 size_t input_size;
1786 gfx::Size max_resolution, min_resolution;
1787 device_->GetSupportedResolution(input_format_fourcc, &min_resolution,
1788 &max_resolution);
1789 if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
1790 input_size = kInputBufferMaxSizeFor4k;
1791 else
1792 input_size = kInputBufferMaxSizeFor1080p;
1794 struct v4l2_format format;
1795 memset(&format, 0, sizeof(format));
1796 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1797 format.fmt.pix_mp.pixelformat = input_format_fourcc;
1798 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
1799 format.fmt.pix_mp.num_planes = 1;
1800 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1802 // We have to set up the format for output, because the driver may not allow
1803 // changing it once we start streaming; whether it can support our chosen
1804 // output format or not may depend on the input format.
1805 struct v4l2_fmtdesc fmtdesc;
1806 memset(&fmtdesc, 0, sizeof(fmtdesc));
1807 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1808 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1809 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
1810 output_format_fourcc_ = fmtdesc.pixelformat;
1811 break;
1812 }
1813 ++fmtdesc.index;
1814 }
1816 if (output_format_fourcc_ == 0) {
1817 LOG(ERROR) << "Could not find a usable output format";
1818 return false;
1819 }
1821 // Just set the fourcc for output; resolution, etc., will come from the
1822 // driver once it extracts it from the stream.
1823 memset(&format, 0, sizeof(format));
1824 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1825 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
1826 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1828 return true;
1829 }
1831 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1832 DVLOG(3) << "CreateOutputBuffers()";
1833 DCHECK(decoder_state_ == kInitialized ||
1834 decoder_state_ == kChangingResolution);
1835 DCHECK(!output_streamon_);
1836 DCHECK(output_buffer_map_.empty());
1838 // Number of output buffers we need.
1839 struct v4l2_control ctrl;
1840 memset(&ctrl, 0, sizeof(ctrl));
1841 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1842 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1843 output_dpb_size_ = ctrl.value;
1845 // Output format setup in Initialize().
1847 // Allocate the output buffers.
1848 struct v4l2_requestbuffers reqbufs;
1849 memset(&reqbufs, 0, sizeof(reqbufs));
1850 reqbufs.count = output_dpb_size_ + kDpbOutputBufferExtraCount;
1851 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1852 reqbufs.memory = V4L2_MEMORY_MMAP;
1853 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1855 output_buffer_map_.resize(reqbufs.count);
1857 DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
1858 << "buffer_count=" << output_buffer_map_.size()
1859 << ", coded_size=" << coded_size_.ToString();
1860 child_task_runner_->PostTask(
1861 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
1862 output_buffer_map_.size(), coded_size_,
1863 device_->GetTextureTarget()));
1865 // Wait for the client to call AssignPictureBuffers() on the Child thread.
1866 // We do this because if we continue decoding without finishing buffer
1867 // allocation, we may end up Resetting before AssignPictureBuffers arrives,
1868 // resulting in unnecessary complications and subtle bugs.
1869 // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
1870 // in a sequence, and Decode(Input1) results in us getting here and exiting
1871 // without waiting, we might end up running Reset{,Done}Task() before
1872 // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
1873 // to the free_output_buffers_ map twice. If we somehow marked buffers as
1874 // not ready, we'd need special handling for restarting the second Decode
1875 // task and delaying it anyway.
1876 // Waiting here is not very costly and makes reasoning about different
1877 // situations much simpler.
1878 pictures_assigned_.Wait();
1880 Enqueue();
1881 return true;
1882 }
1884 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1885 DVLOG(3) << "DestroyInputBuffers()";
1886 DCHECK(child_task_runner_->BelongsToCurrentThread());
1887 DCHECK(!input_streamon_);
1889 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1890 if (input_buffer_map_[i].address != NULL) {
1891 device_->Munmap(input_buffer_map_[i].address,
1892 input_buffer_map_[i].length);
1893 }
1894 }
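// A VIDIOC_REQBUFS with count 0 releases the driver-side buffer allocations.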
1896 struct v4l2_requestbuffers reqbufs;
1897 memset(&reqbufs, 0, sizeof(reqbufs));
1898 reqbufs.count = 0;
1899 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1900 reqbufs.memory = V4L2_MEMORY_MMAP;
1901 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1903 input_buffer_map_.clear();
1904 free_input_buffers_.clear();
1905 }
1907 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1908 DVLOG(3) << "DestroyOutputBuffers()";
1909 DCHECK(child_task_runner_->BelongsToCurrentThread());
1910 DCHECK(!output_streamon_);
1911 bool success = true;
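// Release the EGLImage and EGLSync attached to each output record, then ask
// the client to dismiss the corresponding PictureBuffer.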
1913 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1914 OutputRecord& output_record = output_buffer_map_[i];
1916 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1917 if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
1918 EGL_TRUE) {
1919 DVLOG(1) << __func__ << " DestroyEGLImage failed.";
1920 success = false;
1921 }
1922 }
1924 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1925 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1926 DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
1927 success = false;
1928 }
1929 }
1931 DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
1932 << output_record.picture_id;
1933 child_task_runner_->PostTask(
1934 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
1935 output_record.picture_id));
1936 }
1938 struct v4l2_requestbuffers reqbufs;
1939 memset(&reqbufs, 0, sizeof(reqbufs));
1940 reqbufs.count = 0;
1941 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1942 reqbufs.memory = V4L2_MEMORY_MMAP;
1943 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
1944 PLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
1945 success = false;
1946 }
1948 output_buffer_map_.clear();
1949 while (!free_output_buffers_.empty())
1950 free_output_buffers_.pop();
1952 return success;
1953 }
1955 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
1956 DCHECK(child_task_runner_->BelongsToCurrentThread());
1957 DVLOG(3) << "ResolutionChangeDestroyBuffers()";
1959 if (!DestroyOutputBuffers()) {
1960 LOG(ERROR) << __func__ << " Failed destroying output buffers.";
1961 NOTIFY_ERROR(PLATFORM_FAILURE);
1962 return;
1963 }
1965 // Finish resolution change on decoder thread.
1966 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1967 &V4L2VideoDecodeAccelerator::FinishResolutionChange,
1968 base::Unretained(this)));
1969 }
1971 void V4L2VideoDecodeAccelerator::SendPictureReady() {
1972 DVLOG(3) << "SendPictureReady()";
1973 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1974 bool resetting_or_flushing =
1975 (decoder_state_ == kResetting || decoder_flushing_);
1976 while (pending_picture_ready_.size() > 0) {
1977 bool cleared = pending_picture_ready_.front().cleared;
1978 const media::Picture& picture = pending_picture_ready_.front().picture;
1979 if (cleared && picture_clearing_count_ == 0) {
1980 // This picture is cleared. Post it to the IO thread to reduce latency. This
1981 // should be the common case once the initial pictures have been cleared.
1982 io_task_runner_->PostTask(
1983 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
1984 pending_picture_ready_.pop();
1985 } else if (!cleared || resetting_or_flushing) {
1986 DVLOG(3) << "SendPictureReady()"
1987 << ". cleared=" << pending_picture_ready_.front().cleared
1988 << ", decoder_state_=" << decoder_state_
1989 << ", decoder_flushing_=" << decoder_flushing_
1990 << ", picture_clearing_count_=" << picture_clearing_count_;
1991 // If the picture is not cleared, post it to the child thread because it
1992 // has to be cleared in the child thread. A picture only needs to be
1993 // cleared once. If the decoder is resetting or flushing, send all
1994 // pictures to ensure PictureReady arrives before the reset or flush is done.
1995 child_task_runner_->PostTaskAndReply(
1996 FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
1997 // Unretained is safe. If Client::PictureReady gets to run, |this| is
1998 // alive. Destroy() waits for the decoder thread to finish.
1999 base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
2000 base::Unretained(this)));
2001 picture_clearing_count_++;
2002 pending_picture_ready_.pop();
2003 } else {
2004 // This picture is cleared, but earlier pictures are still being cleared on
2005 // the child thread. To preserve ordering, do not send this one until those
2006 // pictures are cleared.
2007 break;
2008 }
2009 }
2010 }
2012 void V4L2VideoDecodeAccelerator::PictureCleared() {
2013 DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
2014 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2015 DCHECK_GT(picture_clearing_count_, 0);
2016 picture_clearing_count_--;
2017 SendPictureReady();
2018 }
2020 } // namespace content