content/common/gpu/media/v4l2_video_decode_accelerator.cc
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
14 #include "base/bind.h"
15 #include "base/command_line.h"
16 #include "base/memory/shared_memory.h"
17 #include "base/message_loop/message_loop.h"
18 #include "base/numerics/safe_conversions.h"
19 #include "base/thread_task_runner_handle.h"
20 #include "base/trace_event/trace_event.h"
21 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
22 #include "media/base/media_switches.h"
23 #include "media/filters/h264_parser.h"
24 #include "ui/gfx/geometry/rect.h"
25 #include "ui/gl/scoped_binders.h"
27 #define NOTIFY_ERROR(x) \
28 do { \
29 LOG(ERROR) << "Setting error state:" << x; \
30 SetErrorState(x); \
31 } while (0)
33 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \
34 do { \
35 if (device_->Ioctl(type, arg) != 0) { \
36 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
37 NOTIFY_ERROR(PLATFORM_FAILURE); \
38 return value; \
39 } \
40 } while (0)
42 #define IOCTL_OR_ERROR_RETURN(type, arg) \
43 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))
45 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)
48 #define IOCTL_OR_LOG_ERROR(type, arg) \
49 do { \
50 if (device_->Ioctl(type, arg) != 0) \
51 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52 } while (0)
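// Illustrative use of the macros above (mirrors CreateInputBuffers() further
// down): a failing VIDIOC_* call is logged with PLOG(), and the _OR_ERROR_
// variants additionally call NOTIFY_ERROR() and return from the enclosing
// function.
//
//   struct v4l2_requestbuffers reqbufs;
//   memset(&reqbufs, 0, sizeof(reqbufs));
//   reqbufs.count = kInputBufferCount;
//   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
//   reqbufs.memory = V4L2_MEMORY_MMAP;
//   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);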
54 namespace content {
56 namespace {
58 // TODO(posciak): remove once we update linux-headers.
59 #ifndef V4L2_EVENT_RESOLUTION_CHANGE
60 #define V4L2_EVENT_RESOLUTION_CHANGE 5
61 #endif
63 } // anonymous namespace
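// V4L2 memory-to-memory naming used throughout this file: the OUTPUT queue
// (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) carries compressed bitstream buffers
// *into* the device, while the CAPTURE queue
// (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) returns decoded frames from it.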
65 struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
66 BitstreamBufferRef(
67 base::WeakPtr<Client>& client,
68 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
69 base::SharedMemory* shm,
70 size_t size,
71 int32 input_id);
72 ~BitstreamBufferRef();
73 const base::WeakPtr<Client> client;
74 const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
75 const scoped_ptr<base::SharedMemory> shm;
76 const size_t size;
77 size_t bytes_used;
78 const int32 input_id;
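// input_id is negative (kFlushBufferId) for the zero-size buffer queued
// internally by FlushTask(); ~BitstreamBufferRef() only posts
// NotifyEndOfBitstreamBuffer() back to the client for ids >= 0.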
81 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
82 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
83 ~EGLSyncKHRRef();
84 EGLDisplay const egl_display;
85 EGLSyncKHR egl_sync;
88 struct V4L2VideoDecodeAccelerator::PictureRecord {
89 PictureRecord(bool cleared, const media::Picture& picture);
90 ~PictureRecord();
91 bool cleared; // Whether the texture is cleared and safe to render from.
92 media::Picture picture; // The decoded picture.
95 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
96 base::WeakPtr<Client>& client,
97 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
98 base::SharedMemory* shm,
99 size_t size,
100 int32 input_id)
101 : client(client),
102 client_task_runner(client_task_runner),
103 shm(shm),
104 size(size),
105 bytes_used(0),
106 input_id(input_id) {
109 V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
110 if (input_id >= 0) {
111 client_task_runner->PostTask(
112 FROM_HERE,
113 base::Bind(&Client::NotifyEndOfBitstreamBuffer, client, input_id));
117 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
118 EGLDisplay egl_display, EGLSyncKHR egl_sync)
119 : egl_display(egl_display),
120 egl_sync(egl_sync) {
123 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
124 // We don't check for eglDestroySyncKHR failures, because if we get here
125 // with a valid sync object, something went wrong and we are getting
126 // destroyed anyway.
127 if (egl_sync != EGL_NO_SYNC_KHR)
128 eglDestroySyncKHR(egl_display, egl_sync);
131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
132 : at_device(false),
133 address(NULL),
134 length(0),
135 bytes_used(0),
136 input_id(-1) {
139 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
142 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
143 : at_device(false),
144 at_client(false),
145 egl_image(EGL_NO_IMAGE_KHR),
146 egl_sync(EGL_NO_SYNC_KHR),
147 picture_id(-1),
148 cleared(false) {
151 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
153 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
154 bool cleared,
155 const media::Picture& picture)
156 : cleared(cleared), picture(picture) {}
158 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
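// Threading overview: construction and the client-facing calls (Initialize(),
// AssignPictureBuffers(), ReusePictureBuffer(), Flush(), Reset(), Destroy())
// run on the child thread (child_task_runner_); Decode() arrives on the IO
// thread (io_task_runner_); decoding work runs on decoder_thread_; blocking
// device poll()s run on device_poll_thread_, which posts results back to the
// decoder thread via ServiceDeviceTask().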
160 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
161 EGLDisplay egl_display,
162 EGLContext egl_context,
163 const base::WeakPtr<Client>& io_client,
164 const base::Callback<bool(void)>& make_context_current,
165 const scoped_refptr<V4L2Device>& device,
166 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
167 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
168 io_task_runner_(io_task_runner),
169 io_client_(io_client),
170 decoder_thread_("V4L2DecoderThread"),
171 decoder_state_(kUninitialized),
172 device_(device),
173 decoder_delay_bitstream_buffer_id_(-1),
174 decoder_current_input_buffer_(-1),
175 decoder_decode_buffer_tasks_scheduled_(0),
176 decoder_frames_at_client_(0),
177 decoder_flushing_(false),
178 resolution_change_pending_(false),
179 resolution_change_reset_pending_(false),
180 decoder_partial_frame_pending_(false),
181 input_streamon_(false),
182 input_buffer_queued_count_(0),
183 output_streamon_(false),
184 output_buffer_queued_count_(0),
185 output_dpb_size_(0),
186 output_planes_count_(0),
187 picture_clearing_count_(0),
188 pictures_assigned_(false, false),
189 device_poll_thread_("V4L2DevicePollThread"),
190 make_context_current_(make_context_current),
191 egl_display_(egl_display),
192 egl_context_(egl_context),
193 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
194 output_format_fourcc_(0),
195 weak_this_factory_(this) {
196 weak_this_ = weak_this_factory_.GetWeakPtr();
199 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
200 DCHECK(!decoder_thread_.IsRunning());
201 DCHECK(!device_poll_thread_.IsRunning());
203 DestroyInputBuffers();
204 DestroyOutputBuffers();
206 // These maps have members that should be manually destroyed, e.g. file
207 // descriptors, mmap() segments, etc.
208 DCHECK(input_buffer_map_.empty());
209 DCHECK(output_buffer_map_.empty());
212 bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
213 Client* client) {
214 DVLOG(3) << "Initialize()";
215 DCHECK(child_task_runner_->BelongsToCurrentThread());
216 DCHECK_EQ(decoder_state_, kUninitialized);
218 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
219 client_ = client_ptr_factory_->GetWeakPtr();
221 switch (profile) {
222 case media::H264PROFILE_BASELINE:
223 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
224 break;
225 case media::H264PROFILE_MAIN:
226 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
227 break;
228 case media::H264PROFILE_HIGH:
229 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
230 break;
231 case media::VP8PROFILE_ANY:
232 DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
233 break;
234 case media::VP9PROFILE_ANY:
235 DVLOG(2) << "Initialize(): profile VP9PROFILE_ANY";
236 break;
237 default:
238 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
239 return false;
241 video_profile_ = profile;
243 if (egl_display_ == EGL_NO_DISPLAY) {
244 LOG(ERROR) << "Initialize(): could not get EGLDisplay";
245 return false;
248 // We need the context to be initialized to query extensions.
249 if (!make_context_current_.Run()) {
250 LOG(ERROR) << "Initialize(): could not make context current";
251 return false;
254 // TODO(posciak): crbug.com/450898.
255 #if defined(ARCH_CPU_ARMEL)
256 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
257 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
258 return false;
260 #endif
262 // Capabilities check.
263 struct v4l2_capability caps;
264 const __u32 kCapsRequired =
265 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
266 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
267 V4L2_CAP_STREAMING;
268 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
269 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
270 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
271 ", caps check failed: 0x" << std::hex << caps.capabilities;
272 return false;
275 if (!SetupFormats())
276 return false;
278 // Subscribe to the resolution change event.
279 struct v4l2_event_subscription sub;
280 memset(&sub, 0, sizeof(sub));
281 sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
282 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);
284 if (video_profile_ >= media::H264PROFILE_MIN &&
285 video_profile_ <= media::H264PROFILE_MAX) {
286 decoder_h264_parser_.reset(new media::H264Parser());
289 if (!CreateInputBuffers())
290 return false;
292 if (!decoder_thread_.Start()) {
293 LOG(ERROR) << "Initialize(): decoder thread failed to start";
294 return false;
297 decoder_state_ = kInitialized;
299 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
300 decoder_thread_.message_loop()->PostTask(
301 FROM_HERE,
302 base::Bind(
303 base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
304 base::Unretained(this)));
306 return true;
309 void V4L2VideoDecodeAccelerator::Decode(
310 const media::BitstreamBuffer& bitstream_buffer) {
311 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
312 << ", size=" << bitstream_buffer.size();
313 DCHECK(io_task_runner_->BelongsToCurrentThread());
315 // DecodeTask() will take care of running a DecodeBufferTask().
316 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
317 &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
318 bitstream_buffer));
321 void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
322 const std::vector<media::PictureBuffer>& buffers) {
323 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
324 DCHECK(child_task_runner_->BelongsToCurrentThread());
326 if (buffers.size() != output_buffer_map_.size()) {
327 LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
328 " buffers. (Got " << buffers.size()
329 << ", requested " << output_buffer_map_.size() << ")";
330 NOTIFY_ERROR(INVALID_ARGUMENT);
331 return;
334 if (!make_context_current_.Run()) {
335 LOG(ERROR) << "AssignPictureBuffers(): could not make context current";
336 NOTIFY_ERROR(PLATFORM_FAILURE);
337 return;
340 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
342 // It's safe to manipulate all the buffer state here, because the decoder
343 // thread is waiting on pictures_assigned_.
344 DCHECK(free_output_buffers_.empty());
345 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
346 DCHECK(buffers[i].size() == coded_size_);
348 OutputRecord& output_record = output_buffer_map_[i];
349 DCHECK(!output_record.at_device);
350 DCHECK(!output_record.at_client);
351 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
352 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
353 DCHECK_EQ(output_record.picture_id, -1);
354 DCHECK_EQ(output_record.cleared, false);
356 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
357 egl_context_,
358 buffers[i].texture_id(),
359 coded_size_,
360 i,
361 output_format_fourcc_,
362 output_planes_count_);
363 if (egl_image == EGL_NO_IMAGE_KHR) {
364 LOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
365 // Ownership of EGLImages allocated in previous iterations of this loop
366 // has been transferred to output_buffer_map_. After we error-out here
367 // the destructor will handle their cleanup.
368 NOTIFY_ERROR(PLATFORM_FAILURE);
369 return;
372 output_record.egl_image = egl_image;
373 output_record.picture_id = buffers[i].id();
374 free_output_buffers_.push(i);
375 DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
376 << "]: picture_id=" << output_record.picture_id;
379 pictures_assigned_.Signal();
382 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
383 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
384 // Must be run on child thread, as we'll insert a sync in the EGL context.
385 DCHECK(child_task_runner_->BelongsToCurrentThread());
387 if (!make_context_current_.Run()) {
388 LOG(ERROR) << "ReusePictureBuffer(): could not make context current";
389 NOTIFY_ERROR(PLATFORM_FAILURE);
390 return;
393 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
394 // TODO(posciak): crbug.com/450898.
395 #if defined(ARCH_CPU_ARMEL)
396 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
397 if (egl_sync == EGL_NO_SYNC_KHR) {
398 LOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
399 NOTIFY_ERROR(PLATFORM_FAILURE);
400 return;
402 #endif
404 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
405 egl_display_, egl_sync));
406 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
407 &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
408 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
411 void V4L2VideoDecodeAccelerator::Flush() {
412 DVLOG(3) << "Flush()";
413 DCHECK(child_task_runner_->BelongsToCurrentThread());
414 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
415 &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
418 void V4L2VideoDecodeAccelerator::Reset() {
419 DVLOG(3) << "Reset()";
420 DCHECK(child_task_runner_->BelongsToCurrentThread());
421 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
422 &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
425 void V4L2VideoDecodeAccelerator::Destroy() {
426 DVLOG(3) << "Destroy()";
427 DCHECK(child_task_runner_->BelongsToCurrentThread());
429 // We're destroying; cancel all callbacks.
430 client_ptr_factory_.reset();
431 weak_this_factory_.InvalidateWeakPtrs();
433 // If the decoder thread is running, destroy using posted task.
434 if (decoder_thread_.IsRunning()) {
435 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
436 &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
437 pictures_assigned_.Signal();
438 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
439 decoder_thread_.Stop();
440 } else {
441 // Otherwise, call the destroy task directly.
442 DestroyTask();
445 delete this;
448 bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
450 // static
451 media::VideoDecodeAccelerator::SupportedProfiles
452 V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
453 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
454 if (!device)
455 return SupportedProfiles();
457 const uint32_t supported_formats[] = {
458 V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9};
459 return device->GetSupportedDecodeProfiles(arraysize(supported_formats),
460 supported_formats);
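// The fourcc list above matches the codec profiles accepted by Initialize():
// H.264 (Baseline/Main/High), VP8 and VP9.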
463 void V4L2VideoDecodeAccelerator::DecodeTask(
464 const media::BitstreamBuffer& bitstream_buffer) {
465 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
466 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
467 DCHECK_NE(decoder_state_, kUninitialized);
468 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
469 bitstream_buffer.id());
471 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
472 io_client_, io_task_runner_,
473 new base::SharedMemory(bitstream_buffer.handle(), true),
474 bitstream_buffer.size(), bitstream_buffer.id()));
475 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
476 LOG(ERROR) << "Decode(): could not map bitstream_buffer";
477 NOTIFY_ERROR(UNREADABLE_INPUT);
478 return;
480 DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();
482 if (decoder_state_ == kResetting || decoder_flushing_) {
483 // In the case that we're resetting or flushing, we need to delay decoding
484 // the BitstreamBuffers that come after the Reset() or Flush() call. When
485 // we're here, we know that this DecodeTask() was scheduled by a Decode()
486 // call that came after (in the client thread) the Reset() or Flush() call;
487 // thus set up the delay if necessary.
488 if (decoder_delay_bitstream_buffer_id_ == -1)
489 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
490 } else if (decoder_state_ == kError) {
491 DVLOG(2) << "DecodeTask(): early out: kError state";
492 return;
495 decoder_input_queue_.push(
496 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
497 decoder_decode_buffer_tasks_scheduled_++;
498 DecodeBufferTask();
501 void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
502 DVLOG(3) << "DecodeBufferTask()";
503 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
504 DCHECK_NE(decoder_state_, kUninitialized);
505 TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
507 decoder_decode_buffer_tasks_scheduled_--;
509 if (decoder_state_ == kResetting) {
510 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
511 return;
512 } else if (decoder_state_ == kError) {
513 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
514 return;
515 } else if (decoder_state_ == kChangingResolution) {
516 DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
517 return;
520 if (decoder_current_bitstream_buffer_ == NULL) {
521 if (decoder_input_queue_.empty()) {
522 // We're waiting for a new buffer -- exit without scheduling a new task.
523 return;
525 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
526 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
527 // We're asked to delay decoding on this and subsequent buffers.
528 return;
531 // Setup to use the next buffer.
532 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
533 decoder_input_queue_.pop();
534 DVLOG(3) << "DecodeBufferTask(): reading input_id="
535 << decoder_current_bitstream_buffer_->input_id
536 << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
537 decoder_current_bitstream_buffer_->shm->memory() :
538 NULL)
539 << ", size=" << decoder_current_bitstream_buffer_->size;
541 bool schedule_task = false;
542 const size_t size = decoder_current_bitstream_buffer_->size;
543 size_t decoded_size = 0;
544 if (size == 0) {
545 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
546 if (input_id >= 0) {
547 // This is a buffer queued from the client that has zero size. Skip.
548 schedule_task = true;
549 } else {
550 // This is a buffer of zero size, queued to flush the pipe. Flush.
551 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
552 static_cast<base::SharedMemory*>(NULL));
553 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
554 // current input, enqueue no data to the next frame, then flush that down.
555 schedule_task = true;
556 if (decoder_current_input_buffer_ != -1 &&
557 input_buffer_map_[decoder_current_input_buffer_].input_id !=
558 kFlushBufferId)
559 schedule_task = FlushInputFrame();
561 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
562 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
563 decoder_partial_frame_pending_ = false;
564 schedule_task = true;
565 } else {
566 // If we failed to enqueue the empty buffer (due to pipeline
567 // backpressure), don't advance the bitstream buffer queue, and don't
568 // schedule the next task. This bitstream buffer queue entry will get
569 // reprocessed when the pipeline frees up.
570 schedule_task = false;
573 } else {
574 // This is a buffer queued from the client, with actual contents. Decode.
575 const uint8* const data =
576 reinterpret_cast<const uint8*>(
577 decoder_current_bitstream_buffer_->shm->memory()) +
578 decoder_current_bitstream_buffer_->bytes_used;
579 const size_t data_size =
580 decoder_current_bitstream_buffer_->size -
581 decoder_current_bitstream_buffer_->bytes_used;
582 if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
583 NOTIFY_ERROR(UNREADABLE_INPUT);
584 return;
586 // AdvanceFrameFragment should not return a size larger than the buffer
587 // size, even on invalid data.
588 CHECK_LE(decoded_size, data_size);
590 switch (decoder_state_) {
591 case kInitialized:
592 case kAfterReset:
593 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
594 break;
595 case kDecoding:
596 schedule_task = DecodeBufferContinue(data, decoded_size);
597 break;
598 default:
599 NOTIFY_ERROR(ILLEGAL_STATE);
600 return;
603 if (decoder_state_ == kError) {
604 // Failed during decode.
605 return;
608 if (schedule_task) {
609 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
610 if (decoder_current_bitstream_buffer_->bytes_used ==
611 decoder_current_bitstream_buffer_->size) {
612 // Our current bitstream buffer is done; return it.
613 int32 input_id = decoder_current_bitstream_buffer_->input_id;
614 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
615 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
616 decoder_current_bitstream_buffer_.reset();
618 ScheduleDecodeBufferTaskIfNeeded();
622 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
623 const uint8* data,
624 size_t size,
625 size_t* endpos) {
626 if (video_profile_ >= media::H264PROFILE_MIN &&
627 video_profile_ <= media::H264PROFILE_MAX) {
628 // For H264, we need to feed HW one frame at a time. This is going to take
629 // some parsing of our input stream.
630 decoder_h264_parser_->SetStream(data, size);
631 media::H264NALU nalu;
632 media::H264Parser::Result result;
633 *endpos = 0;
635 // Keep on peeking the next NALs while they don't indicate a frame
636 // boundary.
637 for (;;) {
638 bool end_of_frame = false;
639 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
640 if (result == media::H264Parser::kInvalidStream ||
641 result == media::H264Parser::kUnsupportedStream)
642 return false;
643 if (result == media::H264Parser::kEOStream) {
644 // We've reached the end of the buffer before finding a frame boundary.
645 decoder_partial_frame_pending_ = true;
646 return true;
648 switch (nalu.nal_unit_type) {
649 case media::H264NALU::kNonIDRSlice:
650 case media::H264NALU::kIDRSlice:
651 if (nalu.size < 1)
652 return false;
653 // For these two, if the "first_mb_in_slice" field is zero, start a
654 // new frame and return. This field is Exp-Golomb coded starting on
655 // the eighth data bit of the NAL; a zero value is encoded with a
656 // leading '1' bit in the byte, which we can detect as the byte being
657 // (unsigned) greater than or equal to 0x80.
658 if (nalu.data[1] >= 0x80) {
659 end_of_frame = true;
660 break;
662 break;
663 case media::H264NALU::kSEIMessage:
664 case media::H264NALU::kSPS:
665 case media::H264NALU::kPPS:
666 case media::H264NALU::kAUD:
667 case media::H264NALU::kEOSeq:
668 case media::H264NALU::kEOStream:
669 case media::H264NALU::kReserved14:
670 case media::H264NALU::kReserved15:
671 case media::H264NALU::kReserved16:
672 case media::H264NALU::kReserved17:
673 case media::H264NALU::kReserved18:
674 // These unconditionally signal a frame boundary.
675 end_of_frame = true;
676 break;
677 default:
678 // For all others, keep going.
679 break;
681 if (end_of_frame) {
682 if (!decoder_partial_frame_pending_ && *endpos == 0) {
683 // The frame was previously restarted, and we haven't filled the
684 // current frame with any contents yet. Start the new frame here and
685 // continue parsing NALs.
686 } else {
687 // The frame wasn't previously restarted and/or we have contents for
688 // the current frame; signal the start of a new frame here: we don't
689 // have a partial frame anymore.
690 decoder_partial_frame_pending_ = false;
691 return true;
694 *endpos = (nalu.data + nalu.size) - data;
696 NOTREACHED();
697 return false;
698 } else {
699 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
700 DCHECK_LE(video_profile_, media::VP9PROFILE_MAX);
701 // For VP8/9, we can just dump the entire buffer. No fragmentation needed,
702 // and we never return a partial frame.
703 *endpos = size;
704 decoder_partial_frame_pending_ = false;
705 return true;
709 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
710 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
712 // If we're behind on tasks, schedule another one.
713 int buffers_to_decode = decoder_input_queue_.size();
714 if (decoder_current_bitstream_buffer_ != NULL)
715 buffers_to_decode++;
716 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
717 decoder_decode_buffer_tasks_scheduled_++;
718 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
719 &V4L2VideoDecodeAccelerator::DecodeBufferTask,
720 base::Unretained(this)));
724 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
725 const void* data, size_t size, size_t* endpos) {
726 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
727 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
728 DCHECK_NE(decoder_state_, kUninitialized);
729 DCHECK_NE(decoder_state_, kDecoding);
730 // Initial decode. We haven't been able to get output stream format info yet.
731 // Get it, and start decoding.
733 // Copy in and send to HW.
734 if (!AppendToInputFrame(data, size))
735 return false;
737 // If we only have a partial frame, don't flush and process yet.
738 if (decoder_partial_frame_pending_)
739 return true;
741 if (!FlushInputFrame())
742 return false;
744 // Recycle buffers.
745 Dequeue();
747 // Check and see if we have format info yet.
748 struct v4l2_format format;
749 gfx::Size visible_size;
750 bool again = false;
751 if (!GetFormatInfo(&format, &visible_size, &again))
752 return false;
754 *endpos = size;
756 if (again) {
757 // Need more stream to decode format, return true and schedule next buffer.
758 return true;
761 // Run this initialization only on first startup.
762 if (decoder_state_ == kInitialized) {
763 DVLOG(3) << "DecodeBufferInitial(): running initialization";
764 // Success! Setup our parameters.
765 if (!CreateBuffersForFormat(format, visible_size))
766 return false;
769 decoder_state_ = kDecoding;
770 ScheduleDecodeBufferTaskIfNeeded();
771 return true;
774 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
775 const void* data, size_t size) {
776 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
777 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
778 DCHECK_EQ(decoder_state_, kDecoding);
780 // Both of these calls will set kError state if they fail.
781 // Only flush the frame if it's complete.
782 return (AppendToInputFrame(data, size) &&
783 (decoder_partial_frame_pending_ || FlushInputFrame()));
786 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
787 const void* data, size_t size) {
788 DVLOG(3) << "AppendToInputFrame()";
789 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
790 DCHECK_NE(decoder_state_, kUninitialized);
791 DCHECK_NE(decoder_state_, kResetting);
792 DCHECK_NE(decoder_state_, kError);
793 // This routine can handle data == NULL and size == 0, which occurs when
794 // we queue an empty buffer for the purposes of flushing the pipe.
796 // Flush if we're too big
797 if (decoder_current_input_buffer_ != -1) {
798 InputRecord& input_record =
799 input_buffer_map_[decoder_current_input_buffer_];
800 if (input_record.bytes_used + size > input_record.length) {
801 if (!FlushInputFrame())
802 return false;
803 decoder_current_input_buffer_ = -1;
807 // Try to get an available input buffer
808 if (decoder_current_input_buffer_ == -1) {
809 if (free_input_buffers_.empty()) {
810 // See if we can get more free buffers from HW
811 Dequeue();
812 if (free_input_buffers_.empty()) {
813 // Nope!
814 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
815 return false;
818 decoder_current_input_buffer_ = free_input_buffers_.back();
819 free_input_buffers_.pop_back();
820 InputRecord& input_record =
821 input_buffer_map_[decoder_current_input_buffer_];
822 DCHECK_EQ(input_record.bytes_used, 0);
823 DCHECK_EQ(input_record.input_id, -1);
824 DCHECK(decoder_current_bitstream_buffer_ != NULL);
825 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
828 DCHECK(data != NULL || size == 0);
829 if (size == 0) {
830 // If we asked for an empty buffer, return now. We return only after
831 // getting the next input buffer, since we might actually want an empty
832 // input buffer for flushing purposes.
833 return true;
836 // Copy in to the buffer.
837 InputRecord& input_record =
838 input_buffer_map_[decoder_current_input_buffer_];
839 if (size > input_record.length - input_record.bytes_used) {
840 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
841 NOTIFY_ERROR(UNREADABLE_INPUT);
842 return false;
844 memcpy(
845 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
846 data,
847 size);
848 input_record.bytes_used += size;
850 return true;
853 bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
854 DVLOG(3) << "FlushInputFrame()";
855 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
856 DCHECK_NE(decoder_state_, kUninitialized);
857 DCHECK_NE(decoder_state_, kResetting);
858 DCHECK_NE(decoder_state_, kError);
860 if (decoder_current_input_buffer_ == -1)
861 return true;
863 InputRecord& input_record =
864 input_buffer_map_[decoder_current_input_buffer_];
865 DCHECK_NE(input_record.input_id, -1);
866 DCHECK(input_record.input_id != kFlushBufferId ||
867 input_record.bytes_used == 0);
868 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
869 // got from the client. We can skip it if it is empty.
870 // * if input_id < 0 (should be kFlushBufferId in this case), this input
871 // buffer was prompted by a flush buffer, and should be queued even when
872 // empty.
873 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
874 input_record.input_id = -1;
875 free_input_buffers_.push_back(decoder_current_input_buffer_);
876 decoder_current_input_buffer_ = -1;
877 return true;
880 // Queue it.
881 input_ready_queue_.push(decoder_current_input_buffer_);
882 decoder_current_input_buffer_ = -1;
883 DVLOG(3) << "FlushInputFrame(): submitting input_id="
884 << input_record.input_id;
885 // Enqueue once since there's new available input for it.
886 Enqueue();
888 return (decoder_state_ != kError);
891 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
892 DVLOG(3) << "ServiceDeviceTask()";
893 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
894 DCHECK_NE(decoder_state_, kUninitialized);
895 TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
897 if (decoder_state_ == kResetting) {
898 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
899 return;
900 } else if (decoder_state_ == kError) {
901 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
902 return;
903 } else if (decoder_state_ == kChangingResolution) {
904 DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
905 return;
908 if (event_pending)
909 DequeueEvents();
910 Dequeue();
911 Enqueue();
913 // Clear the interrupt fd.
914 if (!device_->ClearDevicePollInterrupt()) {
915 NOTIFY_ERROR(PLATFORM_FAILURE);
916 return;
919 bool poll_device = false;
920 // Add fd, if we should poll on it.
921 // Can be polled as soon as either input or output buffers are queued.
922 if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
923 poll_device = true;
925 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
926 // so either:
927 // * device_poll_thread_ is running normally
928 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
929 // shut it down, in which case we're either in kResetting or kError states
930 // respectively, and we should have early-outed already.
931 DCHECK(device_poll_thread_.message_loop());
932 // Queue the DevicePollTask() now.
933 device_poll_thread_.message_loop()->PostTask(
934 FROM_HERE,
935 base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
936 base::Unretained(this),
937 poll_device));
939 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
940 << decoder_input_queue_.size() << "->"
941 << input_ready_queue_.size() << "] => DEVICE["
942 << free_input_buffers_.size() << "+"
943 << input_buffer_queued_count_ << "/"
944 << input_buffer_map_.size() << "->"
945 << free_output_buffers_.size() << "+"
946 << output_buffer_queued_count_ << "/"
947 << output_buffer_map_.size() << "] => VDA["
948 << decoder_frames_at_client_ << "]";
950 ScheduleDecodeBufferTaskIfNeeded();
951 StartResolutionChangeIfNeeded();
954 void V4L2VideoDecodeAccelerator::Enqueue() {
955 DVLOG(3) << "Enqueue()";
956 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
957 DCHECK_NE(decoder_state_, kUninitialized);
958 TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
960 // Drain the pipe of completed decode buffers.
961 const int old_inputs_queued = input_buffer_queued_count_;
962 while (!input_ready_queue_.empty()) {
963 if (!EnqueueInputRecord())
964 return;
966 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
967 // We just started up a previously empty queue.
968 // Queue state changed; signal interrupt.
969 if (!device_->SetDevicePollInterrupt()) {
970 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
971 NOTIFY_ERROR(PLATFORM_FAILURE);
972 return;
974 // Start VIDIOC_STREAMON if we haven't yet.
975 if (!input_streamon_) {
976 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
977 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
978 input_streamon_ = true;
982 // Enqueue all the outputs we can.
983 const int old_outputs_queued = output_buffer_queued_count_;
984 while (!free_output_buffers_.empty()) {
985 if (!EnqueueOutputRecord())
986 return;
988 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
989 // We just started up a previously empty queue.
990 // Queue state changed; signal interrupt.
991 if (!device_->SetDevicePollInterrupt()) {
992 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
993 NOTIFY_ERROR(PLATFORM_FAILURE);
994 return;
996 // Start VIDIOC_STREAMON if we haven't yet.
997 if (!output_streamon_) {
998 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
999 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
1000 output_streamon_ = true;
1005 void V4L2VideoDecodeAccelerator::DequeueEvents() {
1006 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1007 DCHECK_NE(decoder_state_, kUninitialized);
1008 DVLOG(3) << "DequeueEvents()";
1010 struct v4l2_event ev;
1011 memset(&ev, 0, sizeof(ev));
1013 while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1014 if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1015 DVLOG(3) << "DequeueEvents(): got resolution change event.";
1016 DCHECK(!resolution_change_pending_);
1017 resolution_change_pending_ = IsResolutionChangeNecessary();
1018 } else {
1019 LOG(ERROR) << "DequeueEvents(): got an event (" << ev.type
1020 << ") we haven't subscribed to.";
1025 void V4L2VideoDecodeAccelerator::Dequeue() {
1026 DVLOG(3) << "Dequeue()";
1027 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1028 DCHECK_NE(decoder_state_, kUninitialized);
1029 TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1031 // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
1032 // list.
1033 while (input_buffer_queued_count_ > 0) {
1034 DCHECK(input_streamon_);
1035 struct v4l2_buffer dqbuf;
1036 struct v4l2_plane planes[1];
1037 memset(&dqbuf, 0, sizeof(dqbuf));
1038 memset(planes, 0, sizeof(planes));
1039 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1040 dqbuf.memory = V4L2_MEMORY_MMAP;
1041 dqbuf.m.planes = planes;
1042 dqbuf.length = 1;
1043 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1044 if (errno == EAGAIN) {
1045 // EAGAIN if we're just out of buffers to dequeue.
1046 break;
1048 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1049 NOTIFY_ERROR(PLATFORM_FAILURE);
1050 return;
1052 InputRecord& input_record = input_buffer_map_[dqbuf.index];
1053 DCHECK(input_record.at_device);
1054 free_input_buffers_.push_back(dqbuf.index);
1055 input_record.at_device = false;
1056 input_record.bytes_used = 0;
1057 input_record.input_id = -1;
1058 input_buffer_queued_count_--;
1061 // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
1062 // completed queue.
1063 while (output_buffer_queued_count_ > 0) {
1064 DCHECK(output_streamon_);
1065 struct v4l2_buffer dqbuf;
1066 scoped_ptr<struct v4l2_plane[]> planes(
1067 new v4l2_plane[output_planes_count_]);
1068 memset(&dqbuf, 0, sizeof(dqbuf));
1069 memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1070 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1071 dqbuf.memory = V4L2_MEMORY_MMAP;
1072 dqbuf.m.planes = planes.get();
1073 dqbuf.length = output_planes_count_;
1074 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1075 if (errno == EAGAIN) {
1076 // EAGAIN if we're just out of buffers to dequeue.
1077 break;
1079 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1080 NOTIFY_ERROR(PLATFORM_FAILURE);
1081 return;
1083 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1084 DCHECK(output_record.at_device);
1085 DCHECK(!output_record.at_client);
1086 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1087 DCHECK_NE(output_record.picture_id, -1);
1088 output_record.at_device = false;
1089 if (dqbuf.m.planes[0].bytesused == 0) {
1090 // This is an empty output buffer returned as part of a flush.
1091 free_output_buffers_.push(dqbuf.index);
1092 } else {
1093 DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
1094 output_record.at_client = true;
1095 DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
1096 << " as picture_id=" << output_record.picture_id;
1097 const media::Picture& picture =
1098 media::Picture(output_record.picture_id, dqbuf.timestamp.tv_sec,
1099 gfx::Rect(visible_size_), false);
1100 pending_picture_ready_.push(
1101 PictureRecord(output_record.cleared, picture));
1102 SendPictureReady();
1103 output_record.cleared = true;
1104 decoder_frames_at_client_++;
1106 output_buffer_queued_count_--;
1109 NotifyFlushDoneIfNeeded();
1112 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1113 DVLOG(3) << "EnqueueInputRecord()";
1114 DCHECK(!input_ready_queue_.empty());
1116 // Enqueue an input (VIDEO_OUTPUT) buffer.
1117 const int buffer = input_ready_queue_.front();
1118 InputRecord& input_record = input_buffer_map_[buffer];
1119 DCHECK(!input_record.at_device);
1120 struct v4l2_buffer qbuf;
1121 struct v4l2_plane qbuf_plane;
1122 memset(&qbuf, 0, sizeof(qbuf));
1123 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1124 qbuf.index = buffer;
1125 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
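// The bitstream buffer id rides through the device in the v4l2 timestamp
// field; Dequeue() reads it back from the matching CAPTURE buffer to pair
// each decoded picture with the bitstream buffer it came from.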
1126 qbuf.timestamp.tv_sec = input_record.input_id;
1127 qbuf.memory = V4L2_MEMORY_MMAP;
1128 qbuf.m.planes = &qbuf_plane;
1129 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1130 qbuf.length = 1;
1131 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1132 input_ready_queue_.pop();
1133 input_record.at_device = true;
1134 input_buffer_queued_count_++;
1135 DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
1136 << input_record.input_id << " size=" << input_record.bytes_used;
1137 return true;
1140 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1141 DVLOG(3) << "EnqueueOutputRecord()";
1142 DCHECK(!free_output_buffers_.empty());
1144 // Enqueue an output (VIDEO_CAPTURE) buffer.
1145 const int buffer = free_output_buffers_.front();
1146 OutputRecord& output_record = output_buffer_map_[buffer];
1147 DCHECK(!output_record.at_device);
1148 DCHECK(!output_record.at_client);
1149 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1150 DCHECK_NE(output_record.picture_id, -1);
1151 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1152 TRACE_EVENT0("Video Decoder",
1153 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1154 // If we have to wait for completion, wait. Note that
1155 // free_output_buffers_ is a FIFO queue, so we always wait on the
1156 // buffer that has been in the queue the longest.
1157 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1158 EGL_FOREVER_KHR) == EGL_FALSE) {
1159 // This will cause tearing, but is safe otherwise.
1160 DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
1162 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1163 LOG(ERROR) << __func__ << " eglDestroySyncKHR failed!";
1164 NOTIFY_ERROR(PLATFORM_FAILURE);
1165 return false;
1167 output_record.egl_sync = EGL_NO_SYNC_KHR;
1169 struct v4l2_buffer qbuf;
1170 scoped_ptr<struct v4l2_plane[]> qbuf_planes(
1171 new v4l2_plane[output_planes_count_]);
1172 memset(&qbuf, 0, sizeof(qbuf));
1173 memset(
1174 qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1175 qbuf.index = buffer;
1176 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1177 qbuf.memory = V4L2_MEMORY_MMAP;
1178 qbuf.m.planes = qbuf_planes.get();
1179 qbuf.length = output_planes_count_;
1180 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1181 free_output_buffers_.pop();
1182 output_record.at_device = true;
1183 output_buffer_queued_count_++;
1184 return true;
1187 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1188 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1189 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1190 << picture_buffer_id;
1191 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1192 TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1194 // We run ReusePictureBufferTask even if we're in kResetting.
1195 if (decoder_state_ == kError) {
1196 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1197 return;
1200 if (decoder_state_ == kChangingResolution) {
1201 DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1202 return;
1205 size_t index;
1206 for (index = 0; index < output_buffer_map_.size(); ++index)
1207 if (output_buffer_map_[index].picture_id == picture_buffer_id)
1208 break;
1210 if (index >= output_buffer_map_.size()) {
1211 // It's possible that we've already posted a DismissPictureBuffer for this
1212 // picture, but it has not yet executed when this ReusePictureBuffer was
1213 // posted to us by the client. In that case just ignore this (we've already
1214 // dismissed it and accounted for that) and let the sync object get
1215 // destroyed.
1216 DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
1217 << picture_buffer_id << " not in use (anymore?).";
1218 return;
1221 OutputRecord& output_record = output_buffer_map_[index];
1222 if (output_record.at_device || !output_record.at_client) {
1223 LOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1224 NOTIFY_ERROR(INVALID_ARGUMENT);
1225 return;
1228 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1229 DCHECK(!output_record.at_device);
1230 output_record.at_client = false;
1231 output_record.egl_sync = egl_sync_ref->egl_sync;
1232 free_output_buffers_.push(index);
1233 decoder_frames_at_client_--;
1234 // Take ownership of the EGLSync.
1235 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1236 // We got a buffer back, so enqueue it back.
1237 Enqueue();
1240 void V4L2VideoDecodeAccelerator::FlushTask() {
1241 DVLOG(3) << "FlushTask()";
1242 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1243 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1245 // Flush outstanding buffers.
1246 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1247 // There's nothing in the pipe, so return done immediately.
1248 DVLOG(3) << "FlushTask(): returning flush";
1249 child_task_runner_->PostTask(FROM_HERE,
1250 base::Bind(&Client::NotifyFlushDone, client_));
1251 return;
1252 } else if (decoder_state_ == kError) {
1253 DVLOG(2) << "FlushTask(): early out: kError state";
1254 return;
1257 // We don't support stacked flushing.
1258 DCHECK(!decoder_flushing_);
1260 // Queue up an empty buffer -- this triggers the flush.
1261 decoder_input_queue_.push(
1262 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1263 io_client_, io_task_runner_, NULL, 0, kFlushBufferId)));
1264 decoder_flushing_ = true;
1265 SendPictureReady(); // Send all pending PictureReady.
1267 ScheduleDecodeBufferTaskIfNeeded();
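// The flush completes asynchronously: the kFlushBufferId buffer queued above
// reaches DecodeBufferTask() as a zero-size buffer, is pushed to the device
// via AppendToInputFrame(NULL, 0) and FlushInputFrame(), and
// NotifyFlushDoneIfNeeded() reports NotifyFlushDone() once all input has
// drained.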
1270 void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1271 if (!decoder_flushing_)
1272 return;
1274 // Pipeline is empty when:
1275 // * Decoder input queue is empty of non-delayed buffers.
1276 // * There is no currently filling input buffer.
1277 // * Input holding queue is empty.
1278 // * All input (VIDEO_OUTPUT) buffers are returned.
1279 if (!decoder_input_queue_.empty()) {
1280 if (decoder_input_queue_.front()->input_id !=
1281 decoder_delay_bitstream_buffer_id_)
1282 return;
1284 if (decoder_current_input_buffer_ != -1)
1285 return;
1286 if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
1287 return;
1289 // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
1290 // sequence after flush to continue, even if we are not resetting. This would
1291 // make sense, because we don't really want to resume from a non-resume point
1292 // (e.g. not from an IDR) if we are flushed.
1293 // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1294 // could argue either way, or even say that Flush() is not needed/harmful when
1295 // transitioning to next chunk.
1296 // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1297 // when doing MSE. This should be harmless otherwise.
1298 if (!StopDevicePoll(false))
1299 return;
1301 if (!StartDevicePoll())
1302 return;
1304 decoder_delay_bitstream_buffer_id_ = -1;
1305 decoder_flushing_ = false;
1306 DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1307 child_task_runner_->PostTask(FROM_HERE,
1308 base::Bind(&Client::NotifyFlushDone, client_));
1310 // While we were flushing, we early-outed DecodeBufferTask()s.
1311 ScheduleDecodeBufferTaskIfNeeded();
1314 void V4L2VideoDecodeAccelerator::ResetTask() {
1315 DVLOG(3) << "ResetTask()";
1316 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1317 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1319 if (decoder_state_ == kError) {
1320 DVLOG(2) << "ResetTask(): early out: kError state";
1321 return;
1324 // If we are in the middle of switching resolutions, postpone reset until
1325 // it's done. We don't have to worry about timing of this wrt to decoding,
1326 // because input pipe is already stopped if we are changing resolution.
1327 // We will come back here after we are done with the resolution change.
1328 DCHECK(!resolution_change_reset_pending_);
1329 if (resolution_change_pending_ || decoder_state_ == kChangingResolution) {
1330 resolution_change_reset_pending_ = true;
1331 return;
1334 // We stop streaming and clear buffer tracking info (not preserving inputs).
1335 // StopDevicePoll() unconditionally does _not_ destroy buffers, however.
1336 if (!StopDevicePoll(false))
1337 return;
1339 decoder_current_bitstream_buffer_.reset();
1340 while (!decoder_input_queue_.empty())
1341 decoder_input_queue_.pop();
1343 decoder_current_input_buffer_ = -1;
1345 // If we were flushing, we'll never return any more BitstreamBuffers or
1346 // PictureBuffers; they have all been dropped and returned by now.
1347 NotifyFlushDoneIfNeeded();
1349 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1350 // jobs will early-out in the kResetting state.
1351 decoder_state_ = kResetting;
1352 SendPictureReady(); // Send all pending PictureReady.
1353 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1354 &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1357 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1358 DVLOG(3) << "ResetDoneTask()";
1359 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1360 TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1362 if (decoder_state_ == kError) {
1363 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1364 return;
1367 if (!StartDevicePoll())
1368 return;
1370 // We might have received a resolution change event while we were waiting
1371 // for the reset to finish. The codec will not post another event if the
1372 // resolution after reset remains the same as the one to which we were just
1373 // about to switch, so preserve the event across reset so we can address
1374 // it after resuming.
1376 // Reset format-specific bits.
1377 if (video_profile_ >= media::H264PROFILE_MIN &&
1378 video_profile_ <= media::H264PROFILE_MAX) {
1379 decoder_h264_parser_.reset(new media::H264Parser());
1382 // Jobs drained, we're finished resetting.
1383 DCHECK_EQ(decoder_state_, kResetting);
1384 if (output_buffer_map_.empty()) {
1385 // We must have gotten Reset() before we had a chance to request buffers
1386 // from the client.
1387 decoder_state_ = kInitialized;
1388 } else {
1389 decoder_state_ = kAfterReset;
1392 decoder_partial_frame_pending_ = false;
1393 decoder_delay_bitstream_buffer_id_ = -1;
1394 child_task_runner_->PostTask(FROM_HERE,
1395 base::Bind(&Client::NotifyResetDone, client_));
1397 // While we were resetting, we early-outed DecodeBufferTask()s.
1398 ScheduleDecodeBufferTaskIfNeeded();
1401 void V4L2VideoDecodeAccelerator::DestroyTask() {
1402 DVLOG(3) << "DestroyTask()";
1403 TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");
1405 // DestroyTask() should run regardless of decoder_state_.
1407 // Stop streaming and the device_poll_thread_.
1408 StopDevicePoll(false);
1410 decoder_current_bitstream_buffer_.reset();
1411 decoder_current_input_buffer_ = -1;
1412 decoder_decode_buffer_tasks_scheduled_ = 0;
1413 decoder_frames_at_client_ = 0;
1414 while (!decoder_input_queue_.empty())
1415 decoder_input_queue_.pop();
1416 decoder_flushing_ = false;
1418 // Set our state to kError. Just in case.
1419 decoder_state_ = kError;
1422 bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
1423 DVLOG(3) << "StartDevicePoll()";
1424 DCHECK(!device_poll_thread_.IsRunning());
1425 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1427 // Start up the device poll thread and schedule its first DevicePollTask().
1428 if (!device_poll_thread_.Start()) {
1429 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1430 NOTIFY_ERROR(PLATFORM_FAILURE);
1431 return false;
1433 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1434 &V4L2VideoDecodeAccelerator::DevicePollTask,
1435 base::Unretained(this),
1436 0));
1438 return true;
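// StopDevicePoll() tears streaming and the poll thread back down.
// keep_input_state is true only for the resolution-change path (see
// StartResolutionChangeIfNeeded()), which must preserve already-queued input
// while the output side is reallocated.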
1441 bool V4L2VideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
1442 DVLOG(3) << "StopDevicePoll()";
1443 if (decoder_thread_.IsRunning())
1444 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1446 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1447 if (!device_->SetDevicePollInterrupt()) {
1448 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
1449 NOTIFY_ERROR(PLATFORM_FAILURE);
1450 return false;
1452 device_poll_thread_.Stop();
1453 // Clear the interrupt now, to be sure.
1454 if (!device_->ClearDevicePollInterrupt()) {
1455 NOTIFY_ERROR(PLATFORM_FAILURE);
1456 return false;
1459 // Stop streaming.
1460 if (!keep_input_state) {
1461 if (input_streamon_) {
1462 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1463 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1465 input_streamon_ = false;
1467 if (output_streamon_) {
1468 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1469 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1471 output_streamon_ = false;
1473 // Reset all our accounting info.
1474 if (!keep_input_state) {
1475 while (!input_ready_queue_.empty())
1476 input_ready_queue_.pop();
1477 free_input_buffers_.clear();
1478 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1479 free_input_buffers_.push_back(i);
1480 input_buffer_map_[i].at_device = false;
1481 input_buffer_map_[i].bytes_used = 0;
1482 input_buffer_map_[i].input_id = -1;
1484 input_buffer_queued_count_ = 0;
1487 while (!free_output_buffers_.empty())
1488 free_output_buffers_.pop();
1490 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1491 OutputRecord& output_record = output_buffer_map_[i];
1492 DCHECK(!(output_record.at_client && output_record.at_device));
1494 // After streamoff, the device drops ownership of all buffers, even if
1495 // we don't dequeue them explicitly.
1496 output_buffer_map_[i].at_device = false;
1497 // Some of them may still be owned by the client however.
1498 // Reuse only those that aren't.
1499 if (!output_record.at_client) {
1500 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1501 free_output_buffers_.push(i);
1504 output_buffer_queued_count_ = 0;
1506 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1507 return true;
1510 void V4L2VideoDecodeAccelerator::StartResolutionChangeIfNeeded() {
1511 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1512 DCHECK_NE(decoder_state_, kUninitialized);
1513 DCHECK_NE(decoder_state_, kResetting);
1515 if (!resolution_change_pending_)
1516 return;
1518 DVLOG(3) << "No more work, initiate resolution change";
1520 // Keep input queue.
1521 if (!StopDevicePoll(true))
1522 return;
1524 decoder_state_ = kChangingResolution;
1525 DCHECK(resolution_change_pending_);
1526 resolution_change_pending_ = false;
1528 // Post a task to clean up buffers on child thread. This will also ensure
1529 // that we won't accept ReusePictureBuffer() anymore after that.
1530 child_task_runner_->PostTask(
1531 FROM_HERE,
1532 base::Bind(&V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
1533 weak_this_));
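// Decoding resumes in FinishResolutionChange() below, which re-queries the
// stream format via GetFormatInfo() and recreates output buffers via
// CreateBuffersForFormat() before restarting the device poll.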
1536 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1537 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1538 DCHECK_EQ(decoder_state_, kChangingResolution);
1539 DVLOG(3) << "FinishResolutionChange()";
1541 if (decoder_state_ == kError) {
1542 DVLOG(2) << "FinishResolutionChange(): early out: kError state";
1543 return;
1546 struct v4l2_format format;
1547 bool again;
1548 gfx::Size visible_size;
1549 bool ret = GetFormatInfo(&format, &visible_size, &again);
1550 if (!ret || again) {
1551 LOG(ERROR) << "Couldn't get format information after resolution change";
1552 NOTIFY_ERROR(PLATFORM_FAILURE);
1553 return;
1556 if (!CreateBuffersForFormat(format, visible_size)) {
1557 LOG(ERROR) << "Couldn't reallocate buffers after resolution change";
1558 NOTIFY_ERROR(PLATFORM_FAILURE);
1559 return;
1562 decoder_state_ = kDecoding;
1564 if (resolution_change_reset_pending_) {
1565 resolution_change_reset_pending_ = false;
1566 ResetTask();
1567 return;
1570 if (!StartDevicePoll())
1571 return;
1573 Enqueue();
1574 ScheduleDecodeBufferTaskIfNeeded();
1577 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1578 DVLOG(3) << "DevicePollTask()";
1579 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1580 TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1582 bool event_pending = false;
1584 if (!device_->Poll(poll_device, &event_pending)) {
1585 NOTIFY_ERROR(PLATFORM_FAILURE);
1586 return;
1589 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1590 // touch decoder state from this thread.
1591 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1592 &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1593 base::Unretained(this), event_pending));
1596 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1597 DVLOG(2) << "NotifyError()";
1599 if (!child_task_runner_->BelongsToCurrentThread()) {
1600 child_task_runner_->PostTask(
1601 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
1602 weak_this_, error));
1603 return;
1606 if (client_) {
1607 client_->NotifyError(error);
1608 client_ptr_factory_.reset();
1612 void V4L2VideoDecodeAccelerator::SetErrorState(Error error) {
1613 // We can touch decoder_state_ only if this is the decoder thread or the
1614 // decoder thread isn't running.
1615 if (decoder_thread_.message_loop() != NULL &&
1616 decoder_thread_.message_loop() != base::MessageLoop::current()) {
1617 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1618 &V4L2VideoDecodeAccelerator::SetErrorState,
1619 base::Unretained(this), error));
1620 return;
1623 // Post NotifyError only if we are already initialized, as the API does
1624 // not allow doing so before that.
1625 if (decoder_state_ != kError && decoder_state_ != kUninitialized)
1626 NotifyError(error);
1628 decoder_state_ = kError;
1629 }
1631 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1632 gfx::Size* visible_size,
1633 bool* again) {
1634 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1636 *again = false;
1637 memset(format, 0, sizeof(*format));
1638 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1639 if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1640 if (errno == EINVAL) {
1641 // EINVAL means we haven't seen enough of the stream yet to determine the format.
1642 *again = true;
1643 return true;
1644 } else {
1645 PLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
1646 NOTIFY_ERROR(PLATFORM_FAILURE);
1647 return false;
1648 }
1649 }
1651 // Make sure we are still getting the format we set on initialization.
1652 if (format->fmt.pix_mp.pixelformat != output_format_fourcc_) {
1653 LOG(ERROR) << "Unexpected format from G_FMT on output";
1654 return false;
1655 }
1657 gfx::Size coded_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
1658 if (visible_size != nullptr)
1659 *visible_size = GetVisibleSize(coded_size);
1661 return true;
1662 }
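// [Editor's note: illustrative sketch, not part of the original file.]
// Example of how a caller might inspect the |format| filled in above once
// |again| is false: the multiplanar union carries the coded size and the
// per-plane buffer sizes that CreateBuffersForFormat() relies on.
static void LogCaptureFormat(const struct v4l2_format& format) {
  const struct v4l2_pix_format_mplane& pix_mp = format.fmt.pix_mp;
  DVLOG(3) << "coded size: " << pix_mp.width << "x" << pix_mp.height
           << ", planes: " << static_cast<int>(pix_mp.num_planes)
           << ", plane 0 sizeimage: " << pix_mp.plane_fmt[0].sizeimage;
}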
1664 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1665 const struct v4l2_format& format,
1666 const gfx::Size& visible_size) {
1667 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1668 output_planes_count_ = format.fmt.pix_mp.num_planes;
1669 coded_size_.SetSize(format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1670 visible_size_ = visible_size;
1671 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
1672 << coded_size_.ToString() << ", visible size: "
1673 << visible_size_.ToString();
1675 if (!CreateOutputBuffers())
1676 return false;
1678 return true;
1679 }
1681 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1682 const gfx::Size& coded_size) {
1683 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1685 struct v4l2_crop crop_arg;
1686 memset(&crop_arg, 0, sizeof(crop_arg));
1687 crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1689 if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
1690 PLOG(ERROR) << "GetVisibleSize(): ioctl() VIDIOC_G_CROP failed";
1691 return coded_size;
1692 }
1694 gfx::Rect rect(crop_arg.c.left, crop_arg.c.top, crop_arg.c.width,
1695 crop_arg.c.height);
1696 DVLOG(3) << "visible rectangle is " << rect.ToString();
1697 if (!gfx::Rect(coded_size).Contains(rect)) {
1698 DLOG(ERROR) << "visible rectangle " << rect.ToString()
1699 << " is not inside coded size " << coded_size.ToString();
1700 return coded_size;
1701 }
1702 if (rect.IsEmpty()) {
1703 DLOG(ERROR) << "visible size is empty";
1704 return coded_size;
1705 }
1707 // Chrome assumes the picture frame is coded at (0, 0).
1708 if (!rect.origin().IsOrigin()) {
1709 DLOG(ERROR) << "Unexpected visible rectangle " << rect.ToString()
1710 << ", top-left is not origin";
1711 return coded_size;
1712 }
1714 return rect.size();
1715 }
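// [Editor's note: illustrative sketch, not part of the original file.]
// A concrete example of the coded/visible distinction handled above: H.264
// coded sizes are macroblock-aligned, so a 1920x1080 stream is typically
// decoded into 1920x1088 buffers with a crop rectangle of (0, 0, 1920, 1080).
// A hypothetical predicate mirroring the constraints GetVisibleSize() checks:
static bool IsAcceptableVisibleRect(const gfx::Rect& visible_rect,
                                    const gfx::Size& coded_size) {
  return !visible_rect.IsEmpty() && visible_rect.origin().IsOrigin() &&
         gfx::Rect(coded_size).Contains(visible_rect);
}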
1717 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1718 DVLOG(3) << "CreateInputBuffers()";
1719 // We always run this as we prepare to initialize.
1720 DCHECK_EQ(decoder_state_, kUninitialized);
1721 DCHECK(!input_streamon_);
1722 DCHECK(input_buffer_map_.empty());
1724 struct v4l2_requestbuffers reqbufs;
1725 memset(&reqbufs, 0, sizeof(reqbufs));
1726 reqbufs.count = kInputBufferCount;
1727 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1728 reqbufs.memory = V4L2_MEMORY_MMAP;
1729 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1730 input_buffer_map_.resize(reqbufs.count);
1731 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1732 free_input_buffers_.push_back(i);
1734 // Query for the MEMORY_MMAP pointer.
1735 struct v4l2_plane planes[1];
1736 struct v4l2_buffer buffer;
1737 memset(&buffer, 0, sizeof(buffer));
1738 memset(planes, 0, sizeof(planes));
1739 buffer.index = i;
1740 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1741 buffer.memory = V4L2_MEMORY_MMAP;
1742 buffer.m.planes = planes;
1743 buffer.length = 1;
1744 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1745 void* address = device_->Mmap(NULL,
1746 buffer.m.planes[0].length,
1747 PROT_READ | PROT_WRITE,
1748 MAP_SHARED,
1749 buffer.m.planes[0].m.mem_offset);
1750 if (address == MAP_FAILED) {
1751 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
1752 return false;
1753 }
1754 input_buffer_map_[i].address = address;
1755 input_buffer_map_[i].length = buffer.m.planes[0].length;
1756 }
1758 return true;
1759 }
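// [Editor's note: illustrative sketch, not part of the original file.]
// How a buffer mapped above is typically used later: bitstream data is
// copied into the mapping and the buffer is queued to the driver's OUTPUT
// (input-to-decoder) queue with VIDIOC_QBUF. |fd|, |index|, |mapping|,
// |data| and |size| are hypothetical; the real queuing happens in Enqueue().
static bool QueueInputBuffer(int fd,
                             int index,
                             void* mapping,
                             const char* data,
                             size_t size) {
  memcpy(mapping, data, size);
  struct v4l2_plane plane;
  struct v4l2_buffer buffer;
  memset(&plane, 0, sizeof(plane));
  memset(&buffer, 0, sizeof(buffer));
  plane.bytesused = static_cast<__u32>(size);
  buffer.index = index;
  buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  buffer.memory = V4L2_MEMORY_MMAP;
  buffer.m.planes = &plane;
  buffer.length = 1;  // One plane for the compressed bitstream.
  return ioctl(fd, VIDIOC_QBUF, &buffer) == 0;
}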
1761 bool V4L2VideoDecodeAccelerator::SetupFormats() {
1762 // We always run this as we prepare to initialize.
1763 DCHECK_EQ(decoder_state_, kUninitialized);
1764 DCHECK(!input_streamon_);
1765 DCHECK(!output_streamon_);
1767 __u32 input_format_fourcc =
1768 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
1769 if (!input_format_fourcc) {
1770 NOTREACHED();
1771 return false;
1772 }
1774 size_t input_size;
1775 gfx::Size max_resolution, min_resolution;
1776 device_->GetSupportedResolution(input_format_fourcc, &min_resolution,
1777 &max_resolution);
1778 if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
1779 input_size = kInputBufferMaxSizeFor4k;
1780 else
1781 input_size = kInputBufferMaxSizeFor1080p;
1783 struct v4l2_format format;
1784 memset(&format, 0, sizeof(format));
1785 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1786 format.fmt.pix_mp.pixelformat = input_format_fourcc;
1787 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
1788 format.fmt.pix_mp.num_planes = 1;
1789 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1791 // We have to set up the format for output, because the driver may not allow
1792 // changing it once we start streaming; whether it can support our chosen
1793 // output format or not may depend on the input format.
1794 struct v4l2_fmtdesc fmtdesc;
1795 memset(&fmtdesc, 0, sizeof(fmtdesc));
1796 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1797 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1798 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
1799 output_format_fourcc_ = fmtdesc.pixelformat;
1800 break;
1801 }
1802 ++fmtdesc.index;
1803 }
1805 if (output_format_fourcc_ == 0) {
1806 LOG(ERROR) << "Could not find a usable output format";
1807 return false;
1808 }
1810 // Just set the fourcc for output; resolution, etc., will come from the
1811 // driver once it extracts it from the stream.
1812 memset(&format, 0, sizeof(format));
1813 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1814 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
1815 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1817 return true;
1818 }
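// [Editor's note: illustrative sketch, not part of the original file.]
// Helper showing what the fourccs negotiated above look like: a V4L2 pixel
// format is four ASCII characters packed into a __u32 (e.g. V4L2_PIX_FMT_NV12
// unpacks to "NV12"). Hypothetical; useful only for logging.
static std::string V4L2FourccToString(__u32 fourcc) {
  std::string result;
  for (int i = 0; i < 4; ++i)
    result += static_cast<char>((fourcc >> (8 * i)) & 0xff);
  return result;
}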
1820 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1821 DVLOG(3) << "CreateOutputBuffers()";
1822 DCHECK(decoder_state_ == kInitialized ||
1823 decoder_state_ == kChangingResolution);
1824 DCHECK(!output_streamon_);
1825 DCHECK(output_buffer_map_.empty());
1827 // Number of output buffers we need.
1828 struct v4l2_control ctrl;
1829 memset(&ctrl, 0, sizeof(ctrl));
1830 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1831 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1832 output_dpb_size_ = ctrl.value;
1834 // The output format was already set up in Initialize().
1836 // Allocate the output buffers.
1837 struct v4l2_requestbuffers reqbufs;
1838 memset(&reqbufs, 0, sizeof(reqbufs));
1839 reqbufs.count = output_dpb_size_ + kDpbOutputBufferExtraCount;
1840 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1841 reqbufs.memory = V4L2_MEMORY_MMAP;
1842 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1844 output_buffer_map_.resize(reqbufs.count);
1846 DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
1847 << "buffer_count=" << output_buffer_map_.size()
1848 << ", coded_size=" << coded_size_.ToString();
1849 child_task_runner_->PostTask(
1850 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
1851 output_buffer_map_.size(), coded_size_,
1852 device_->GetTextureTarget()));
1854 // Wait for the client to call AssignPictureBuffers() on the Child thread.
1855 // We do this because, if we continue decoding without finishing buffer
1856 // allocation, we may end up Resetting before AssignPictureBuffers arrives,
1857 // resulting in unnecessary complications and subtle bugs.
1858 // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
1859 // in a sequence, and Decode(Input1) results in us getting here and exiting
1860 // without waiting, we might end up running Reset{,Done}Task() before
1861 // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
1862 // to the free_output_buffers_ queue twice. If we somehow marked buffers as
1863 // not ready, we'd need special handling for restarting the second Decode
1864 // task and delaying it anyway.
1865 // Waiting here is not very costly and makes reasoning about different
1866 // situations much simpler.
1867 pictures_assigned_.Wait();
1869 Enqueue();
1870 return true;
1871 }
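// [Editor's note: illustrative sketch, not part of the original file.]
// VIDIOC_REQBUFS is a request, not a guarantee: the driver may allocate a
// different number of buffers than asked for, which is why the code above
// resizes output_buffer_map_ to reqbufs.count afterwards. A hypothetical
// standalone version of the same negotiation against a raw fd:
static int RequestCaptureBuffers(int fd, unsigned int requested) {
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = requested;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  if (ioctl(fd, VIDIOC_REQBUFS, &reqbufs) != 0)
    return -1;
  return static_cast<int>(reqbufs.count);  // What the driver actually allocated.
}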
1873 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1874 DVLOG(3) << "DestroyInputBuffers()";
1875 DCHECK(child_task_runner_->BelongsToCurrentThread());
1876 DCHECK(!input_streamon_);
1878 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1879 if (input_buffer_map_[i].address != NULL) {
1880 device_->Munmap(input_buffer_map_[i].address,
1881 input_buffer_map_[i].length);
1882 }
1883 }
1885 struct v4l2_requestbuffers reqbufs;
1886 memset(&reqbufs, 0, sizeof(reqbufs));
1887 reqbufs.count = 0;
1888 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1889 reqbufs.memory = V4L2_MEMORY_MMAP;
1890 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1892 input_buffer_map_.clear();
1893 free_input_buffers_.clear();
1894 }
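// [Editor's note: illustrative sketch, not part of the original file.]
// Requesting zero buffers, as done above, is the standard V4L2 idiom for
// releasing a queue's driver-side buffers; the same pattern is used for both
// queues. A hypothetical raw-fd version:
static bool FreeQueueBuffers(int fd, enum v4l2_buf_type type) {
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;  // count == 0 releases all buffers on this queue.
  reqbufs.type = type;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  return ioctl(fd, VIDIOC_REQBUFS, &reqbufs) == 0;
}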
1896 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1897 DVLOG(3) << "DestroyOutputBuffers()";
1898 DCHECK(child_task_runner_->BelongsToCurrentThread());
1899 DCHECK(!output_streamon_);
1900 bool success = true;
1902 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1903 OutputRecord& output_record = output_buffer_map_[i];
1905 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1906 if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
1907 EGL_TRUE) {
1908 DVLOG(1) << __func__ << " DestroyEGLImage failed.";
1909 success = false;
1910 }
1911 }
1913 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1914 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1915 DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
1916 success = false;
1917 }
1918 }
1920 DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
1921 << output_record.picture_id;
1922 child_task_runner_->PostTask(
1923 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
1924 output_record.picture_id));
1925 }
1927 struct v4l2_requestbuffers reqbufs;
1928 memset(&reqbufs, 0, sizeof(reqbufs));
1929 reqbufs.count = 0;
1930 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1931 reqbufs.memory = V4L2_MEMORY_MMAP;
1932 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
1933 PLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
1934 success = false;
1935 }
1937 output_buffer_map_.clear();
1938 while (!free_output_buffers_.empty())
1939 free_output_buffers_.pop();
1941 return success;
1942 }
1944 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
1945 DCHECK(child_task_runner_->BelongsToCurrentThread());
1946 DVLOG(3) << "ResolutionChangeDestroyBuffers()";
1948 if (!DestroyOutputBuffers()) {
1949 LOG(ERROR) << __func__ << " Failed destroying output buffers.";
1950 NOTIFY_ERROR(PLATFORM_FAILURE);
1951 return;
1952 }
1954 // Finish resolution change on decoder thread.
1955 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1956 &V4L2VideoDecodeAccelerator::FinishResolutionChange,
1957 base::Unretained(this)));
1958 }
1960 void V4L2VideoDecodeAccelerator::SendPictureReady() {
1961 DVLOG(3) << "SendPictureReady()";
1962 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1963 bool resetting_or_flushing =
1964 (decoder_state_ == kResetting || decoder_flushing_);
1965 while (pending_picture_ready_.size() > 0) {
1966 bool cleared = pending_picture_ready_.front().cleared;
1967 const media::Picture& picture = pending_picture_ready_.front().picture;
1968 if (cleared && picture_clearing_count_ == 0) {
1969 // This picture is cleared. Post it to the IO thread to reduce latency. This
1970 // should be the case after all pictures are cleared at the beginning.
1971 io_task_runner_->PostTask(
1972 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
1973 pending_picture_ready_.pop();
1974 } else if (!cleared || resetting_or_flushing) {
1975 DVLOG(3) << "SendPictureReady()"
1976 << ". cleared=" << pending_picture_ready_.front().cleared
1977 << ", decoder_state_=" << decoder_state_
1978 << ", decoder_flushing_=" << decoder_flushing_
1979 << ", picture_clearing_count_=" << picture_clearing_count_;
1980 // If the picture is not cleared, post it to the child thread because it
1981 // has to be cleared in the child thread. A picture only needs to be
1982 // cleared once. If the decoder is resetting or flushing, send all
1983 // pictures to ensure PictureReady arrives before the reset or flush is done.
1984 child_task_runner_->PostTaskAndReply(
1985 FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
1986 // Unretained is safe. If Client::PictureReady gets to run, |this| is
1987 // alive. Destroy() will wait for the decoder thread to finish.
1988 base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
1989 base::Unretained(this)));
1990 picture_clearing_count_++;
1991 pending_picture_ready_.pop();
1992 } else {
1993 // This picture is cleared. But some pictures are about to be cleared on
1994 // the child thread. To preserve the order, do not send this until those
1995 // pictures are cleared.
1996 break;
1997 }
1998 }
1999 }
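// [Editor's note: illustrative sketch, not part of the original file.]
// The dispatch decision made in the loop above, restated as a pure function
// over the front of the queue. It makes the ordering invariant explicit: a
// cleared picture is held back while earlier pictures are still being cleared
// on the child thread. All names here are hypothetical.
enum PictureDispatch { kSendOnIOThread, kSendOnChildThread, kHoldBack };
static PictureDispatch ClassifyFrontPicture(bool cleared,
                                            int pictures_being_cleared,
                                            bool resetting_or_flushing) {
  if (cleared && pictures_being_cleared == 0)
    return kSendOnIOThread;  // Fast path: cleared and nothing ahead of it.
  if (!cleared || resetting_or_flushing)
    return kSendOnChildThread;  // Must be cleared first (or drained on reset/flush).
  return kHoldBack;  // Cleared, but earlier pictures are not done clearing yet.
}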
2001 void V4L2VideoDecodeAccelerator::PictureCleared() {
2002 DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
2003 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2004 DCHECK_GT(picture_clearing_count_, 0);
2005 picture_clearing_count_--;
2006 SendPictureReady();
2007 }
2009 bool V4L2VideoDecodeAccelerator::IsResolutionChangeNecessary() {
2010 DVLOG(3) << "IsResolutionChangeNecessary() ";
2012 struct v4l2_control ctrl;
2013 memset(&ctrl, 0, sizeof(ctrl));
2014 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2015 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
2016 if (ctrl.value != output_dpb_size_) {
2017 DVLOG(3)
2018 << "IsResolutionChangeNecessary(): Returning true since DPB mismatch ";
2019 return true;
2020 }
2021 struct v4l2_format format;
2022 bool again = false;
2023 bool ret = GetFormatInfo(&format, nullptr, &again);
2024 if (!ret || again) {
2025 DVLOG(3) << "IsResolutionChangeNecessary(): GetFormatInfo() failed";
2026 return false;
2027 }
2028 gfx::Size new_coded_size(base::checked_cast<int>(format.fmt.pix_mp.width),
2029 base::checked_cast<int>(format.fmt.pix_mp.height));
2030 if (coded_size_ != new_coded_size) {
2031 DVLOG(3) << "IsResolutionChangeNecessary(): Resolution change detected";
2032 return true;
2033 }
2034 return false;
2035 }
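// [Editor's note: illustrative sketch, not part of the original file.]
// The V4L2_CID_MIN_BUFFERS_FOR_CAPTURE control consulted above is how the
// driver reports the DPB size the current stream needs; a change in this
// value or in the coded size is what triggers buffer reallocation. A
// hypothetical raw-fd version of the query:
static bool QueryMinCaptureBuffers(int fd, int* min_buffers) {
  struct v4l2_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
  if (ioctl(fd, VIDIOC_G_CTRL, &ctrl) != 0)
    return false;
  *min_buffers = ctrl.value;
  return true;
}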
2037 } // namespace content