[chromium-blink-merge.git] / content / common / gpu / media / exynos_video_decode_accelerator.cc
blob edce5f3ef8bcb6b08fb4aadc7ccb1cc1c34c4c5c
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
14 #include "base/bind.h"
15 #include "base/debug/trace_event.h"
16 #include "base/memory/shared_memory.h"
17 #include "base/message_loop/message_loop.h"
18 #include "base/message_loop/message_loop_proxy.h"
19 #include "base/posix/eintr_wrapper.h"
20 #include "content/common/gpu/media/exynos_video_decode_accelerator.h"
21 #include "content/common/gpu/media/h264_parser.h"
22 #include "ui/gl/scoped_binders.h"
24 namespace content {
26 #define NOTIFY_ERROR(x) \
27 do { \
28 SetDecoderState(kError); \
29 DLOG(ERROR) << "calling NotifyError(): " << x; \
30 NotifyError(x); \
31 } while (0)
33 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
34 do { \
35 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
36 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
37 NOTIFY_ERROR(PLATFORM_FAILURE); \
38 return; \
39 } \
40 } while (0)
42 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
43 do { \
44 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
45 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
46 NOTIFY_ERROR(PLATFORM_FAILURE); \
47 return false; \
48 } \
49 } while (0)
51 namespace {
53 // TODO(posciak): remove once we update linux-headers.
54 #ifndef V4L2_EVENT_RESOLUTION_CHANGE
55 #define V4L2_EVENT_RESOLUTION_CHANGE 5
56 #endif
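// Pipeline wired up in this file (a descriptive note, not part of the V4L2
// API): the MFC (Multi Format Codec) node decodes the bitstream, the GSC
// (G-Scaler) node converts the decoder's tiled output into the dma_buf-backed
// buffers the GPU samples, and libmali.so is the vendor EGL/GL driver from
// which a dma_buf helper is resolved at runtime via dlsym().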
58 const char kExynosMfcDevice[] = "/dev/mfc-dec";
59 const char kExynosGscDevice[] = "/dev/gsc1";
60 const char kMaliDriver[] = "libmali.so";
62 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
63 void*);
65 void* libmali_handle = NULL;
66 MaliEglImageGetBufferExtPhandleFunc
67 mali_egl_image_get_buffer_ext_phandle = NULL;
68 } // anonymous namespace
70 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
71 BitstreamBufferRef(
72 base::WeakPtr<Client>& client,
73 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
74 base::SharedMemory* shm,
75 size_t size,
76 int32 input_id);
77 ~BitstreamBufferRef();
78 const base::WeakPtr<Client> client;
79 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
80 const scoped_ptr<base::SharedMemory> shm;
81 const size_t size;
82 off_t bytes_used;
83 const int32 input_id;
84 };
86 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
87 PictureBufferArrayRef(EGLDisplay egl_display, size_t count);
88 ~PictureBufferArrayRef();
90 struct PictureBufferRef {
91 EGLImageKHR egl_image;
92 int egl_image_fd;
93 int32 client_id;
94 };
96 EGLDisplay const egl_display;
97 std::vector<PictureBufferRef> picture_buffers;
98 };
100 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
101 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
102 ~EGLSyncKHRRef();
103 EGLDisplay const egl_display;
104 EGLSyncKHR egl_sync;
105 };
107 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
108 base::WeakPtr<Client>& client,
109 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
110 base::SharedMemory* shm, size_t size, int32 input_id)
111 : client(client),
112 client_message_loop_proxy(client_message_loop_proxy),
113 shm(shm),
114 size(size),
115 bytes_used(0),
116 input_id(input_id) {
117 }
119 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
120 if (input_id >= 0) {
121 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
122 &Client::NotifyEndOfBitstreamBuffer, client, input_id));
123 }
124 }
126 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
127 EGLDisplay egl_display, size_t count)
128 : egl_display(egl_display),
129 picture_buffers(count) {
130 for (size_t i = 0; i < picture_buffers.size(); ++i) {
131 PictureBufferRef& buffer = picture_buffers[i];
132 buffer.egl_image = EGL_NO_IMAGE_KHR;
133 buffer.egl_image_fd = -1;
134 buffer.client_id = -1;
135 }
136 }
138 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
139 for (size_t i = 0; i < picture_buffers.size(); ++i) {
140 PictureBufferRef& buffer = picture_buffers[i];
141 if (buffer.egl_image != EGL_NO_IMAGE_KHR)
142 eglDestroyImageKHR(egl_display, buffer.egl_image);
143 if (buffer.egl_image_fd != -1)
144 HANDLE_EINTR(close(buffer.egl_image_fd));
145 }
146 }
148 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
149 EGLDisplay egl_display, EGLSyncKHR egl_sync)
150 : egl_display(egl_display),
151 egl_sync(egl_sync) {
152 }
154 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
155 if (egl_sync != EGL_NO_SYNC_KHR)
156 eglDestroySyncKHR(egl_display, egl_sync);
157 }
159 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
160 : at_device(false),
161 address(NULL),
162 length(0),
163 bytes_used(0),
164 input_id(-1) {
165 }
167 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
168 }
170 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
171 : at_device(false),
172 input_id(-1) {
173 bytes_used[0] = 0;
174 bytes_used[1] = 0;
175 address[0] = NULL;
176 address[1] = NULL;
177 length[0] = 0;
178 length[1] = 0;
179 }
181 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
182 }
184 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
185 : at_device(false),
186 mfc_output(-1) {
187 }
189 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
190 }
192 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
193 : at_device(false),
194 at_client(false),
195 fd(-1),
196 egl_image(EGL_NO_IMAGE_KHR),
197 egl_sync(EGL_NO_SYNC_KHR),
198 picture_id(-1) {
199 }
201 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
202 }
204 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
205 EGLDisplay egl_display,
206 EGLContext egl_context,
207 Client* client,
208 const base::WeakPtr<Client>& io_client,
209 const base::Callback<bool(void)>& make_context_current,
210 const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
211 : child_message_loop_proxy_(base::MessageLoopProxy::current()),
212 io_message_loop_proxy_(io_message_loop_proxy),
213 weak_this_(base::AsWeakPtr(this)),
214 client_ptr_factory_(client),
215 client_(client_ptr_factory_.GetWeakPtr()),
216 io_client_(io_client),
217 decoder_thread_("ExynosDecoderThread"),
218 decoder_state_(kUninitialized),
219 decoder_delay_bitstream_buffer_id_(-1),
220 decoder_current_input_buffer_(-1),
221 decoder_decode_buffer_tasks_scheduled_(0),
222 decoder_frames_at_client_(0),
223 decoder_flushing_(false),
224 resolution_change_pending_(false),
225 resolution_change_reset_pending_(false),
226 decoder_partial_frame_pending_(false),
227 mfc_fd_(-1),
228 mfc_input_streamon_(false),
229 mfc_input_buffer_queued_count_(0),
230 mfc_output_streamon_(false),
231 mfc_output_buffer_queued_count_(0),
232 mfc_output_buffer_pixelformat_(0),
233 mfc_output_dpb_size_(0),
234 gsc_fd_(-1),
235 gsc_input_streamon_(false),
236 gsc_input_buffer_queued_count_(0),
237 gsc_output_streamon_(false),
238 gsc_output_buffer_queued_count_(0),
239 device_poll_thread_("ExynosDevicePollThread"),
240 device_poll_interrupt_fd_(-1),
241 make_context_current_(make_context_current),
242 egl_display_(egl_display),
243 egl_context_(egl_context),
244 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
245 }
247 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
248 DCHECK(!decoder_thread_.IsRunning());
249 DCHECK(!device_poll_thread_.IsRunning());
251 if (device_poll_interrupt_fd_ != -1) {
252 HANDLE_EINTR(close(device_poll_interrupt_fd_));
253 device_poll_interrupt_fd_ = -1;
254 }
255 if (gsc_fd_ != -1) {
256 DestroyGscInputBuffers();
257 DestroyGscOutputBuffers();
258 HANDLE_EINTR(close(gsc_fd_));
259 gsc_fd_ = -1;
260 }
261 if (mfc_fd_ != -1) {
262 DestroyMfcInputBuffers();
263 DestroyMfcOutputBuffers();
264 HANDLE_EINTR(close(mfc_fd_));
265 mfc_fd_ = -1;
266 }
268 // These maps have members that should be manually destroyed, e.g. file
269 // descriptors, mmap() segments, etc.
270 DCHECK(mfc_input_buffer_map_.empty());
271 DCHECK(mfc_output_buffer_map_.empty());
272 DCHECK(gsc_input_buffer_map_.empty());
273 DCHECK(gsc_output_buffer_map_.empty());
274 }
276 bool ExynosVideoDecodeAccelerator::Initialize(
277 media::VideoCodecProfile profile) {
278 DVLOG(3) << "Initialize()";
279 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
280 DCHECK_EQ(decoder_state_, kUninitialized);
282 switch (profile) {
283 case media::H264PROFILE_BASELINE:
284 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
285 break;
286 case media::H264PROFILE_MAIN:
287 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
288 break;
289 case media::H264PROFILE_HIGH:
290 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
291 break;
292 case media::VP8PROFILE_MAIN:
293 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
294 break;
295 default:
296 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
297 return false;
298 }
299 video_profile_ = profile;
301 static bool sandbox_initialized = PostSandboxInitialization();
302 if (!sandbox_initialized) {
303 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
304 NOTIFY_ERROR(PLATFORM_FAILURE);
305 return false;
306 }
308 if (egl_display_ == EGL_NO_DISPLAY) {
309 DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
310 NOTIFY_ERROR(PLATFORM_FAILURE);
311 return false;
312 }
314 if (egl_context_ == EGL_NO_CONTEXT) {
315 DLOG(ERROR) << "Initialize(): could not get EGLContext";
316 NOTIFY_ERROR(PLATFORM_FAILURE);
317 return false;
318 }
320 // We need the context to be initialized to query extensions.
321 if (!make_context_current_.Run()) {
322 DLOG(ERROR) << "Initialize(): could not make context current";
323 NOTIFY_ERROR(PLATFORM_FAILURE);
324 return false;
325 }
327 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
328 DLOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
329 NOTIFY_ERROR(PLATFORM_FAILURE);
330 return false;
331 }
333 // Open the video devices.
334 DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice;
335 mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice,
336 O_RDWR | O_NONBLOCK | O_CLOEXEC));
337 if (mfc_fd_ == -1) {
338 DPLOG(ERROR) << "Initialize(): could not open MFC device: "
339 << kExynosMfcDevice;
340 NOTIFY_ERROR(PLATFORM_FAILURE);
341 return false;
342 }
343 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice;
344 gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice,
345 O_RDWR | O_NONBLOCK | O_CLOEXEC));
346 if (gsc_fd_ == -1) {
347 DPLOG(ERROR) << "Initialize(): could not open GSC device: "
348 << kExynosGscDevice;
349 NOTIFY_ERROR(PLATFORM_FAILURE);
350 return false;
351 }
353 // Create the interrupt fd.
354 DCHECK_EQ(device_poll_interrupt_fd_, -1);
355 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
356 if (device_poll_interrupt_fd_ == -1) {
357 DPLOG(ERROR) << "Initialize(): eventfd() failed";
358 NOTIFY_ERROR(PLATFORM_FAILURE);
359 return false;
360 }
362 // Capabilities check.
363 struct v4l2_capability caps;
364 const __u32 kCapsRequired =
365 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
366 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
367 V4L2_CAP_STREAMING;
368 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
369 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
370 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
371 ", caps check failed: 0x" << std::hex << caps.capabilities;
372 NOTIFY_ERROR(PLATFORM_FAILURE);
373 return false;
374 }
375 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
376 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
377 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
378 ", caps check failed: 0x" << std::hex << caps.capabilities;
379 NOTIFY_ERROR(PLATFORM_FAILURE);
380 return false;
381 }
383 if (!CreateMfcInputBuffers())
384 return false;
386 // MFC output format has to be setup before streaming starts.
387 struct v4l2_format format;
388 memset(&format, 0, sizeof(format));
389 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
390 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
391 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
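// V4L2_PIX_FMT_NV12MT_16X16 is NV12 stored in 16x16 macroblock tiles, the
// MFC's native output layout; the GSC stage configured later converts these
// tiled buffers into the EGLImage-backed pictures returned to the client.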
393 // Subscribe to the resolution change event.
394 struct v4l2_event_subscription sub;
395 memset(&sub, 0, sizeof(sub));
396 sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
397 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_SUBSCRIBE_EVENT, &sub);
399 // Initialize format-specific bits.
400 if (video_profile_ >= media::H264PROFILE_MIN &&
401 video_profile_ <= media::H264PROFILE_MAX) {
402 decoder_h264_parser_.reset(new content::H264Parser());
403 }
405 if (!decoder_thread_.Start()) {
406 DLOG(ERROR) << "Initialize(): decoder thread failed to start";
407 NOTIFY_ERROR(PLATFORM_FAILURE);
408 return false;
409 }
411 SetDecoderState(kInitialized);
413 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
414 &Client::NotifyInitializeDone, client_));
415 return true;
416 }
418 void ExynosVideoDecodeAccelerator::Decode(
419 const media::BitstreamBuffer& bitstream_buffer) {
420 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
421 << ", size=" << bitstream_buffer.size();
422 DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
424 // DecodeTask() will take care of running a DecodeBufferTask().
425 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
426 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
427 bitstream_buffer));
428 }
430 void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
431 const std::vector<media::PictureBuffer>& buffers) {
432 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
433 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
435 if (buffers.size() != gsc_output_buffer_map_.size()) {
436 DLOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
437 " buffers. (Got " << buffers.size() << ", requested " <<
438 gsc_output_buffer_map_.size() << ")";
439 NOTIFY_ERROR(INVALID_ARGUMENT);
440 return;
441 }
443 for (size_t i = 0; i < buffers.size(); ++i) {
444 if (buffers[i].size() != frame_buffer_size_) {
445 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer size";
446 NOTIFY_ERROR(INVALID_ARGUMENT);
447 return;
448 }
449 }
451 if (!make_context_current_.Run()) {
452 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
453 NOTIFY_ERROR(PLATFORM_FAILURE);
454 return;
455 }
457 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
458 new PictureBufferArrayRef(egl_display_, buffers.size()));
460 const static EGLint kImageAttrs[] = {
461 EGL_IMAGE_PRESERVED_KHR, 0,
462 EGL_NONE,
463 };
464 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
465 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_2D, 0);
466 for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) {
467 PictureBufferArrayRef::PictureBufferRef& buffer =
468 pic_buffers_ref->picture_buffers[i];
469 // Create the X pixmap and then create an EGLImageKHR from it, so we can
470 // get dma_buf backing.
471 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
472 buffers[i].size().width(), buffers[i].size().height(), 32);
473 if (!pixmap) {
474 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
475 NOTIFY_ERROR(PLATFORM_FAILURE);
476 return;
477 }
478 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
479 EGLImageKHR egl_image = eglCreateImageKHR(
480 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
481 (EGLClientBuffer)pixmap, kImageAttrs);
482 // We can free the X pixmap immediately -- according to the
483 // EGL_KHR_image_base spec, the backing storage does not go away until the
484 // last referencing EGLImage is destroyed.
485 XFreePixmap(x_display, pixmap);
486 if (egl_image == EGL_NO_IMAGE_KHR) {
487 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
488 NOTIFY_ERROR(PLATFORM_FAILURE);
489 return;
490 }
491 buffer.egl_image = egl_image;
492 int fd;
493 if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) {
494 DLOG(ERROR) << "AssignPictureBuffers(): "
495 << "could not get EGLImageKHR dmabuf fd";
496 NOTIFY_ERROR(PLATFORM_FAILURE);
497 return;
498 }
499 buffer.egl_image_fd = fd;
500 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, egl_image);
501 buffer.client_id = buffers[i].id();
502 }
503 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
504 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
505 base::Unretained(this), base::Passed(&pic_buffers_ref)));
506 }
508 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
509 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
510 // Must be run on child thread, as we'll insert a sync in the EGL context.
511 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
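// The fence created below is waited on in EnqueueGscOutputRecord() before the
// corresponding buffer is queued back to GSC, so the client's GL reads of the
// texture finish before the hardware writes into the dma_buf again.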
513 if (!make_context_current_.Run()) {
514 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
515 NOTIFY_ERROR(PLATFORM_FAILURE);
516 return;
517 }
519 EGLSyncKHR egl_sync =
520 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
521 if (egl_sync == EGL_NO_SYNC_KHR) {
522 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
523 NOTIFY_ERROR(PLATFORM_FAILURE);
524 return;
525 }
527 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
528 egl_display_, egl_sync));
529 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
530 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
531 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
532 }
534 void ExynosVideoDecodeAccelerator::Flush() {
535 DVLOG(3) << "Flush()";
536 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
537 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
538 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
539 }
541 void ExynosVideoDecodeAccelerator::Reset() {
542 DVLOG(3) << "Reset()";
543 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
544 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
545 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
546 }
548 void ExynosVideoDecodeAccelerator::Destroy() {
549 DVLOG(3) << "Destroy()";
550 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
552 // We're destroying; cancel all callbacks.
553 client_ptr_factory_.InvalidateWeakPtrs();
555 // If the decoder thread is running, destroy using posted task.
556 if (decoder_thread_.IsRunning()) {
557 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
558 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
559 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
560 decoder_thread_.Stop();
561 } else {
562 // Otherwise, call the destroy task directly.
563 DestroyTask();
564 }
566 // Set to kError state just in case.
567 SetDecoderState(kError);
569 delete this;
570 }
572 bool ExynosVideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
574 // static
575 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
576 DVLOG(3) << "PreSandboxInitialization()";
577 dlerror();
579 libmali_handle = dlopen(kMaliDriver, RTLD_LAZY | RTLD_LOCAL);
580 if (libmali_handle == NULL) {
581 DPLOG(ERROR) << "failed to dlopen() " << kMaliDriver << ": " << dlerror();
582 }
583 }
585 // static
586 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
587 DVLOG(3) << "PostSandboxInitialization()";
588 if (libmali_handle == NULL) {
589 DLOG(ERROR) << "PostSandboxInitialization(): no " << kMaliDriver
590 << " driver handle";
591 return false;
592 }
594 dlerror();
595 mali_egl_image_get_buffer_ext_phandle =
596 reinterpret_cast<MaliEglImageGetBufferExtPhandleFunc>(
597 dlsym(libmali_handle, "mali_egl_image_get_buffer_ext_phandle"));
598 if (mali_egl_image_get_buffer_ext_phandle == NULL) {
599 DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() "
600 << "mali_egl_image_get_buffer_ext_phandle: " << dlerror();
601 return false;
602 }
604 return true;
605 }
607 void ExynosVideoDecodeAccelerator::DecodeTask(
608 const media::BitstreamBuffer& bitstream_buffer) {
609 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
610 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
611 DCHECK_NE(decoder_state_, kUninitialized);
612 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
613 bitstream_buffer.id());
615 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
616 io_client_, io_message_loop_proxy_,
617 new base::SharedMemory(bitstream_buffer.handle(), true),
618 bitstream_buffer.size(), bitstream_buffer.id()));
619 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
620 DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
621 NOTIFY_ERROR(UNREADABLE_INPUT);
622 return;
623 }
624 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();
626 if (decoder_state_ == kResetting || decoder_flushing_) {
627 // In the case that we're resetting or flushing, we need to delay decoding
628 // the BitstreamBuffers that come after the Reset() or Flush() call. When
629 // we're here, we know that this DecodeTask() was scheduled by a Decode()
630 // call that came after (in the client thread) the Reset() or Flush() call;
631 // thus set up the delay if necessary.
632 if (decoder_delay_bitstream_buffer_id_ == -1)
633 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
634 } else if (decoder_state_ == kError) {
635 DVLOG(2) << "DecodeTask(): early out: kError state";
636 return;
637 }
639 decoder_input_queue_.push_back(
640 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
641 decoder_decode_buffer_tasks_scheduled_++;
642 DecodeBufferTask();
643 }
645 void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
646 DVLOG(3) << "DecodeBufferTask()";
647 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
648 DCHECK_NE(decoder_state_, kUninitialized);
649 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");
651 decoder_decode_buffer_tasks_scheduled_--;
653 if (decoder_state_ == kResetting) {
654 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
655 return;
656 } else if (decoder_state_ == kError) {
657 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
658 return;
659 } else if (decoder_state_ == kChangingResolution) {
660 DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
661 return;
662 }
664 if (decoder_current_bitstream_buffer_ == NULL) {
665 if (decoder_input_queue_.empty()) {
666 // We're waiting for a new buffer -- exit without scheduling a new task.
667 return;
668 }
669 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
670 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
671 // We're asked to delay decoding on this and subsequent buffers.
672 return;
673 }
675 // Setup to use the next buffer.
676 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
677 decoder_input_queue_.pop_front();
678 DVLOG(3) << "DecodeBufferTask(): reading input_id="
679 << decoder_current_bitstream_buffer_->input_id
680 << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
681 decoder_current_bitstream_buffer_->shm->memory() :
682 NULL)
683 << ", size=" << decoder_current_bitstream_buffer_->size;
684 }
685 bool schedule_task = false;
686 const size_t size = decoder_current_bitstream_buffer_->size;
687 size_t decoded_size = 0;
688 if (size == 0) {
689 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
690 if (input_id >= 0) {
691 // This is a buffer queued from the client that has zero size. Skip.
692 schedule_task = true;
693 } else {
694 // This is a buffer of zero size, queued to flush the pipe. Flush.
695 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
696 static_cast<base::SharedMemory*>(NULL));
697 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
698 // current input, enqueue no data to the next frame, then flush that down.
699 schedule_task = true;
700 if (decoder_current_input_buffer_ != -1 &&
701 mfc_input_buffer_map_[decoder_current_input_buffer_].input_id !=
702 kFlushBufferId)
703 schedule_task = FlushInputFrame();
705 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
706 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
707 decoder_partial_frame_pending_ = false;
708 schedule_task = true;
709 } else {
710 // If we failed to enqueue the empty buffer (due to pipeline
711 // backpressure), don't advance the bitstream buffer queue, and don't
712 // schedule the next task. This bitstream buffer queue entry will get
713 // reprocessed when the pipeline frees up.
714 schedule_task = false;
715 }
716 }
717 } else {
718 // This is a buffer queued from the client, with actual contents. Decode.
719 const uint8* const data =
720 reinterpret_cast<const uint8*>(
721 decoder_current_bitstream_buffer_->shm->memory()) +
722 decoder_current_bitstream_buffer_->bytes_used;
723 const size_t data_size =
724 decoder_current_bitstream_buffer_->size -
725 decoder_current_bitstream_buffer_->bytes_used;
726 if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
727 NOTIFY_ERROR(UNREADABLE_INPUT);
728 return;
729 }
730 // AdvanceFrameFragment should not return a size larger than the buffer
731 // size, even on invalid data.
732 CHECK_LE(decoded_size, data_size);
734 switch (decoder_state_) {
735 case kInitialized:
736 case kAfterReset:
737 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
738 break;
739 case kDecoding:
740 schedule_task = DecodeBufferContinue(data, decoded_size);
741 break;
742 default:
743 NOTIFY_ERROR(ILLEGAL_STATE);
744 return;
745 }
746 }
747 if (decoder_state_ == kError) {
748 // Failed during decode.
749 return;
750 }
752 if (schedule_task) {
753 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
754 if (decoder_current_bitstream_buffer_->bytes_used ==
755 decoder_current_bitstream_buffer_->size) {
756 // Our current bitstream buffer is done; return it.
757 int32 input_id = decoder_current_bitstream_buffer_->input_id;
758 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
759 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
760 decoder_current_bitstream_buffer_.reset();
761 }
762 ScheduleDecodeBufferTaskIfNeeded();
763 }
764 }
766 bool ExynosVideoDecodeAccelerator::AdvanceFrameFragment(
767 const uint8* data,
768 size_t size,
769 size_t* endpos) {
770 if (video_profile_ >= media::H264PROFILE_MIN &&
771 video_profile_ <= media::H264PROFILE_MAX) {
772 // For H264, we need to feed HW one frame at a time. This is going to take
773 // some parsing of our input stream.
774 decoder_h264_parser_->SetStream(data, size);
775 content::H264NALU nalu;
776 content::H264Parser::Result result;
777 *endpos = 0;
779 // Keep on peeking the next NALs while they don't indicate a frame
780 // boundary.
781 for (;;) {
782 bool end_of_frame = false;
783 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
784 if (result == content::H264Parser::kInvalidStream ||
785 result == content::H264Parser::kUnsupportedStream)
786 return false;
787 if (result == content::H264Parser::kEOStream) {
788 // We've reached the end of the buffer before finding a frame boundary.
789 decoder_partial_frame_pending_ = true;
790 return true;
791 }
792 switch (nalu.nal_unit_type) {
793 case content::H264NALU::kNonIDRSlice:
794 case content::H264NALU::kIDRSlice:
795 if (nalu.size < 1)
796 return false;
797 // For these two, if the "first_mb_in_slice" field is zero, start a
798 // new frame and return. This field is Exp-Golomb coded starting on
799 // the eighth data bit of the NAL; a zero value is encoded with a
800 // leading '1' bit in the byte, which we can detect as the byte being
801 // (unsigned) greater than or equal to 0x80.
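// Worked example: a second NAL byte of 0xB0 (1011....) starts with a '1' bit,
// so first_mb_in_slice = ue(v) = 0 and a new frame starts here, while 0x40
// (0100....) decodes to first_mb_in_slice = 1, i.e. a continuation slice.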
802 if (nalu.data[1] >= 0x80) {
803 end_of_frame = true;
804 break;
805 }
806 break;
807 case content::H264NALU::kSPS:
808 case content::H264NALU::kPPS:
809 case content::H264NALU::kEOSeq:
810 case content::H264NALU::kEOStream:
811 // These unconditionally signal a frame boundary.
812 end_of_frame = true;
813 break;
814 default:
815 // For all others, keep going.
816 break;
817 }
818 if (end_of_frame) {
819 if (!decoder_partial_frame_pending_ && *endpos == 0) {
820 // The frame was previously restarted, and we haven't filled the
821 // current frame with any contents yet. Start the new frame here and
822 // continue parsing NALs.
823 } else {
824 // The frame wasn't previously restarted and/or we have contents for
825 // the current frame; signal the start of a new frame here: we don't
826 // have a partial frame anymore.
827 decoder_partial_frame_pending_ = false;
828 return true;
829 }
830 }
831 *endpos = (nalu.data + nalu.size) - data;
832 }
833 NOTREACHED();
834 return false;
835 } else {
836 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
837 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
838 // For VP8, we can just dump the entire buffer. No fragmentation needed,
839 // and we never return a partial frame.
840 *endpos = size;
841 decoder_partial_frame_pending_ = false;
842 return true;
843 }
844 }
846 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
847 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
849 // If we're behind on tasks, schedule another one.
850 int buffers_to_decode = decoder_input_queue_.size();
851 if (decoder_current_bitstream_buffer_ != NULL)
852 buffers_to_decode++;
853 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
854 decoder_decode_buffer_tasks_scheduled_++;
855 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
856 &ExynosVideoDecodeAccelerator::DecodeBufferTask,
857 base::Unretained(this)));
858 }
859 }
861 bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
862 const void* data, size_t size, size_t* endpos) {
863 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
864 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
865 DCHECK_NE(decoder_state_, kUninitialized);
866 DCHECK_NE(decoder_state_, kDecoding);
867 DCHECK(!device_poll_thread_.IsRunning());
868 // Initial decode. We haven't been able to get output stream format info yet.
869 // Get it, and start decoding.
871 // Copy in and send to HW.
872 if (!AppendToInputFrame(data, size))
873 return false;
875 // If we only have a partial frame, don't flush and process yet.
876 if (decoder_partial_frame_pending_)
877 return true;
879 if (!FlushInputFrame())
880 return false;
882 // Recycle buffers.
883 DequeueMfc();
885 // Check and see if we have format info yet.
886 struct v4l2_format format;
887 bool again = false;
888 if (!GetFormatInfo(&format, &again))
889 return false;
891 if (again) {
892 // Need more stream to decode format, return true and schedule next buffer.
893 *endpos = size;
894 return true;
895 }
897 // Run this initialization only on first startup.
898 if (decoder_state_ == kInitialized) {
899 DVLOG(3) << "DecodeBufferInitial(): running initialization";
900 // Success! Setup our parameters.
901 if (!CreateBuffersForFormat(format))
902 return false;
904 // MFC expects to process the initial buffer once during stream init to
905 // configure stream parameters, but will not consume the stream data on that
906 // iteration. Subsequent iterations (including after reset) do not require
907 // the stream init step.
908 *endpos = 0;
909 } else {
910 *endpos = size;
911 }
913 // StartDevicePoll will raise the error if there is one.
914 if (!StartDevicePoll())
915 return false;
917 decoder_state_ = kDecoding;
918 ScheduleDecodeBufferTaskIfNeeded();
919 return true;
920 }
922 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
923 const void* data, size_t size) {
924 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
925 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
926 DCHECK_EQ(decoder_state_, kDecoding);
928 // Both of these calls will set kError state if they fail.
929 // Only flush the frame if it's complete.
930 return (AppendToInputFrame(data, size) &&
931 (decoder_partial_frame_pending_ || FlushInputFrame()));
932 }
934 bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
935 const void* data, size_t size) {
936 DVLOG(3) << "AppendToInputFrame()";
937 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
938 DCHECK_NE(decoder_state_, kUninitialized);
939 DCHECK_NE(decoder_state_, kResetting);
940 DCHECK_NE(decoder_state_, kError);
941 // This routine can handle data == NULL and size == 0, which occurs when
942 // we queue an empty buffer for the purposes of flushing the pipe.
944 // Flush if we're too big
945 if (decoder_current_input_buffer_ != -1) {
946 MfcInputRecord& input_record =
947 mfc_input_buffer_map_[decoder_current_input_buffer_];
948 if (input_record.bytes_used + size > input_record.length) {
949 if (!FlushInputFrame())
950 return false;
951 decoder_current_input_buffer_ = -1;
952 }
953 }
955 // Try to get an available input buffer
956 if (decoder_current_input_buffer_ == -1) {
957 if (mfc_free_input_buffers_.empty()) {
958 // See if we can get more free buffers from HW
959 DequeueMfc();
960 if (mfc_free_input_buffers_.empty()) {
961 // Nope!
962 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
963 return false;
964 }
965 }
966 decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
967 mfc_free_input_buffers_.pop_back();
968 MfcInputRecord& input_record =
969 mfc_input_buffer_map_[decoder_current_input_buffer_];
970 DCHECK_EQ(input_record.bytes_used, 0);
971 DCHECK_EQ(input_record.input_id, -1);
972 DCHECK(decoder_current_bitstream_buffer_ != NULL);
973 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
974 }
976 DCHECK(data != NULL || size == 0);
977 if (size == 0) {
978 // If we asked for an empty buffer, return now. We return only after
979 // getting the next input buffer, since we might actually want an empty
980 // input buffer for flushing purposes.
981 return true;
982 }
984 // Copy in to the buffer.
985 MfcInputRecord& input_record =
986 mfc_input_buffer_map_[decoder_current_input_buffer_];
987 if (size > input_record.length - input_record.bytes_used) {
988 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
989 NOTIFY_ERROR(UNREADABLE_INPUT);
990 return false;
991 }
992 memcpy(
993 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
994 data,
995 size);
996 input_record.bytes_used += size;
998 return true;
999 }
1001 bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
1002 DVLOG(3) << "FlushInputFrame()";
1003 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1004 DCHECK_NE(decoder_state_, kUninitialized);
1005 DCHECK_NE(decoder_state_, kResetting);
1006 DCHECK_NE(decoder_state_, kError);
1008 if (decoder_current_input_buffer_ == -1)
1009 return true;
1011 MfcInputRecord& input_record =
1012 mfc_input_buffer_map_[decoder_current_input_buffer_];
1013 DCHECK_NE(input_record.input_id, -1);
1014 DCHECK(input_record.input_id != kFlushBufferId ||
1015 input_record.bytes_used == 0);
1016 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
1017 // got from the client. We can skip it if it is empty.
1018 // * if input_id < 0 (should be kFlushBufferId in this case), this input
1019 // buffer was prompted by a flush buffer, and should be queued even when
1020 // empty.
1021 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
1022 input_record.input_id = -1;
1023 mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
1024 decoder_current_input_buffer_ = -1;
1025 return true;
1026 }
1028 // Queue it to MFC.
1029 mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
1030 decoder_current_input_buffer_ = -1;
1031 DVLOG(3) << "FlushInputFrame(): submitting input_id="
1032 << input_record.input_id;
1033 // Kick the MFC once since there's new available input for it.
1034 EnqueueMfc();
1036 return (decoder_state_ != kError);
1037 }
1039 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
1040 scoped_ptr<PictureBufferArrayRef> pic_buffers) {
1041 DVLOG(3) << "AssignPictureBuffersTask()";
1042 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1043 DCHECK_NE(decoder_state_, kUninitialized);
1044 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");
1046 // We run AssignPictureBuffersTask even if we're in kResetting.
1047 if (decoder_state_ == kError) {
1048 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
1049 return;
1050 }
1052 DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size());
1053 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1054 // We should be blank right now.
1055 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
1056 DCHECK_EQ(output_record.fd, -1);
1057 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1058 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1059 DCHECK_EQ(output_record.picture_id, -1);
1060 PictureBufferArrayRef::PictureBufferRef& buffer =
1061 pic_buffers->picture_buffers[i];
1062 output_record.fd = buffer.egl_image_fd;
1063 output_record.egl_image = buffer.egl_image;
1064 output_record.picture_id = buffer.client_id;
1066 // Take ownership of the EGLImage and fd.
1067 buffer.egl_image = EGL_NO_IMAGE_KHR;
1068 buffer.egl_image_fd = -1;
1069 // And add this buffer to the free list.
1070 gsc_free_output_buffers_.push_back(i);
1071 }
1073 // We got buffers! Kick the GSC.
1074 EnqueueGsc();
1076 if (decoder_state_ == kChangingResolution)
1077 ResumeAfterResolutionChange();
1078 }
1080 void ExynosVideoDecodeAccelerator::ServiceDeviceTask(bool mfc_event_pending) {
1081 DVLOG(3) << "ServiceDeviceTask()";
1082 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1083 DCHECK_NE(decoder_state_, kUninitialized);
1084 DCHECK_NE(decoder_state_, kInitialized);
1085 DCHECK_NE(decoder_state_, kAfterReset);
1086 TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");
1088 if (decoder_state_ == kResetting) {
1089 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
1090 return;
1091 } else if (decoder_state_ == kError) {
1092 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
1093 return;
1094 } else if (decoder_state_ == kChangingResolution) {
1095 DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
1096 return;
1097 }
1099 if (mfc_event_pending)
1100 DequeueMfcEvents();
1101 DequeueMfc();
1102 DequeueGsc();
1103 EnqueueMfc();
1104 EnqueueGsc();
1106 // Clear the interrupt fd.
1107 if (!ClearDevicePollInterrupt())
1108 return;
1110 unsigned int poll_fds = 0;
1111 // Add MFC fd, if we should poll on it.
1112 // MFC can be polled as soon as either input or output buffers are queued.
1113 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
1114 poll_fds |= kPollMfc;
1115 // Add GSC fd, if we should poll on it.
1116 // GSC has to wait until both input and output buffers are queued.
1117 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
1118 poll_fds |= kPollGsc;
1120 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
1121 // so either:
1122 // * device_poll_thread_ is running normally
1123 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
1124 // shut it down, in which case we're either in kResetting or kError states
1125 // respectively, and we should have early-outed already.
1126 DCHECK(device_poll_thread_.message_loop());
1127 // Queue the DevicePollTask() now.
1128 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1129 &ExynosVideoDecodeAccelerator::DevicePollTask,
1130 base::Unretained(this),
1131 poll_fds));
1133 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
1134 << decoder_input_queue_.size() << "->"
1135 << mfc_input_ready_queue_.size() << "] => MFC["
1136 << mfc_free_input_buffers_.size() << "+"
1137 << mfc_input_buffer_queued_count_ << "/"
1138 << mfc_input_buffer_map_.size() << "->"
1139 << mfc_free_output_buffers_.size() << "+"
1140 << mfc_output_buffer_queued_count_ << "/"
1141 << mfc_output_buffer_map_.size() << "] => "
1142 << mfc_output_gsc_input_queue_.size() << " => GSC["
1143 << gsc_free_input_buffers_.size() << "+"
1144 << gsc_input_buffer_queued_count_ << "/"
1145 << gsc_input_buffer_map_.size() << "->"
1146 << gsc_free_output_buffers_.size() << "+"
1147 << gsc_output_buffer_queued_count_ << "/"
1148 << gsc_output_buffer_map_.size() << "] => VDA["
1149 << decoder_frames_at_client_ << "]";
1151 ScheduleDecodeBufferTaskIfNeeded();
1152 StartResolutionChangeIfNeeded();
1153 }
1155 void ExynosVideoDecodeAccelerator::EnqueueMfc() {
1156 DVLOG(3) << "EnqueueMfc()";
1157 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1158 DCHECK_NE(decoder_state_, kUninitialized);
1159 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");
1161 // Drain the pipe of completed decode buffers.
1162 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
1163 while (!mfc_input_ready_queue_.empty()) {
1164 if (!EnqueueMfcInputRecord())
1165 return;
1166 }
1167 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
1168 // We just started up a previously empty queue.
1169 // Queue state changed; signal interrupt.
1170 if (!SetDevicePollInterrupt())
1171 return;
1172 // Start VIDIOC_STREAMON if we haven't yet.
1173 if (!mfc_input_streamon_) {
1174 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1175 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1176 mfc_input_streamon_ = true;
1177 }
1178 }
1180 // Enqueue all the MFC outputs we can.
1181 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
1182 while (!mfc_free_output_buffers_.empty()) {
1183 if (!EnqueueMfcOutputRecord())
1184 return;
1185 }
1186 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
1187 // We just started up a previously empty queue.
1188 // Queue state changed; signal interrupt.
1189 if (!SetDevicePollInterrupt())
1190 return;
1191 // Start VIDIOC_STREAMON if we haven't yet.
1192 if (!mfc_output_streamon_) {
1193 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1194 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1195 mfc_output_streamon_ = true;
1196 }
1197 }
1198 }
1200 void ExynosVideoDecodeAccelerator::DequeueMfcEvents() {
1201 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1202 DCHECK_EQ(decoder_state_, kDecoding);
1203 DVLOG(3) << "DequeueMfcEvents()";
1205 struct v4l2_event ev;
1206 memset(&ev, 0, sizeof(ev));
1208 while (ioctl(mfc_fd_, VIDIOC_DQEVENT, &ev) == 0) {
1209 if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1210 DVLOG(3) << "DequeueMfcEvents(): got resolution change event.";
1211 DCHECK(!resolution_change_pending_);
1212 resolution_change_pending_ = true;
1213 } else {
1214 DLOG(FATAL) << "DequeueMfcEvents(): got an event (" << ev.type
1215 << ") we haven't subscribed to.";
1216 }
1217 }
1218 }
1220 void ExynosVideoDecodeAccelerator::DequeueMfc() {
1221 DVLOG(3) << "DequeueMfc()";
1222 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1223 DCHECK_NE(decoder_state_, kUninitialized);
1224 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");
1226 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
1227 // list.
1228 struct v4l2_buffer dqbuf;
1229 struct v4l2_plane planes[2];
1230 while (mfc_input_buffer_queued_count_ > 0) {
1231 DCHECK(mfc_input_streamon_);
1232 memset(&dqbuf, 0, sizeof(dqbuf));
1233 memset(planes, 0, sizeof(planes));
1234 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1235 dqbuf.memory = V4L2_MEMORY_MMAP;
1236 dqbuf.m.planes = planes;
1237 dqbuf.length = 1;
1238 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1239 if (errno == EAGAIN) {
1240 // EAGAIN if we're just out of buffers to dequeue.
1241 break;
1242 }
1243 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1244 NOTIFY_ERROR(PLATFORM_FAILURE);
1245 return;
1246 }
1247 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
1248 DCHECK(input_record.at_device);
1249 mfc_free_input_buffers_.push_back(dqbuf.index);
1250 input_record.at_device = false;
1251 input_record.bytes_used = 0;
1252 input_record.input_id = -1;
1253 mfc_input_buffer_queued_count_--;
1254 }
1256 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
1257 // completed queue.
1258 while (mfc_output_buffer_queued_count_ > 0) {
1259 DCHECK(mfc_output_streamon_);
1260 memset(&dqbuf, 0, sizeof(dqbuf));
1261 memset(planes, 0, sizeof(planes));
1262 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1263 dqbuf.memory = V4L2_MEMORY_MMAP;
1264 dqbuf.m.planes = planes;
1265 dqbuf.length = 2;
1266 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1267 if (errno == EAGAIN) {
1268 // EAGAIN if we're just out of buffers to dequeue.
1269 break;
1270 }
1271 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1272 NOTIFY_ERROR(PLATFORM_FAILURE);
1273 return;
1274 }
1275 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
1276 DCHECK(output_record.at_device);
1277 output_record.at_device = false;
1278 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
1279 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
1280 if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) {
1281 // This is an empty output buffer returned as part of a flush.
1282 mfc_free_output_buffers_.push_back(dqbuf.index);
1283 output_record.input_id = -1;
1284 } else {
1285 // This is an output buffer with contents to pass down the pipe.
1286 mfc_output_gsc_input_queue_.push_back(dqbuf.index);
1287 output_record.input_id = dqbuf.timestamp.tv_sec;
1288 DCHECK(output_record.input_id >= 0);
1289 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id;
1290 // We don't count this output buffer dequeued yet, or add it to the free
1291 // list, as it has data GSC needs to process.
1293 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
1294 SetDevicePollInterrupt();
1295 }
1296 mfc_output_buffer_queued_count_--;
1297 }
1299 NotifyFlushDoneIfNeeded();
1300 }
1302 void ExynosVideoDecodeAccelerator::EnqueueGsc() {
1303 DVLOG(3) << "EnqueueGsc()";
1304 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1305 DCHECK_NE(decoder_state_, kUninitialized);
1306 DCHECK_NE(decoder_state_, kInitialized);
1307 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");
1309 // Drain the pipe of completed MFC output buffers.
1310 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
1311 while (!mfc_output_gsc_input_queue_.empty() &&
1312 !gsc_free_input_buffers_.empty()) {
1313 if (!EnqueueGscInputRecord())
1314 return;
1315 }
1316 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
1317 // We just started up a previously empty queue.
1318 // Queue state changed; signal interrupt.
1319 if (!SetDevicePollInterrupt())
1320 return;
1321 // Start VIDIOC_STREAMON if we haven't yet.
1322 if (!gsc_input_streamon_) {
1323 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1324 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1325 gsc_input_streamon_ = true;
1326 }
1327 }
1329 if (gsc_input_buffer_queued_count_ != 0 &&
1330 gsc_output_buffer_queued_count_ == 0 &&
1331 !gsc_free_output_buffers_.empty()) {
1332 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
1333 if (!EnqueueGscOutputRecord())
1334 return;
1335 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
1336 // We just started up a previously empty queue.
1337 // Queue state changed; signal interrupt.
1338 if (!SetDevicePollInterrupt())
1339 return;
1340 // Start VIDIOC_STREAMON if we haven't yet.
1341 if (!gsc_output_streamon_) {
1342 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1343 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1344 gsc_output_streamon_ = true;
1345 }
1346 }
1347 }
1348 // Bug check: GSC is liable to race conditions if more than one buffer is
1349 // simultaneously queued.
1350 DCHECK_GE(1, gsc_output_buffer_queued_count_);
1351 }
1353 void ExynosVideoDecodeAccelerator::DequeueGsc() {
1354 DVLOG(3) << "DequeueGsc()";
1355 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1356 DCHECK_NE(decoder_state_, kUninitialized);
1357 DCHECK_NE(decoder_state_, kInitialized);
1358 DCHECK_NE(decoder_state_, kAfterReset);
1359 TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");
1361 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
1362 // list. Also recycle the corresponding MFC output buffers at this time.
1363 struct v4l2_buffer dqbuf;
1364 struct v4l2_plane planes[2];
1365 while (gsc_input_buffer_queued_count_ > 0) {
1366 DCHECK(gsc_input_streamon_);
1367 memset(&dqbuf, 0, sizeof(dqbuf));
1368 memset(planes, 0, sizeof(planes));
1369 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1370 dqbuf.memory = V4L2_MEMORY_DMABUF;
1371 dqbuf.m.planes = planes;
1372 dqbuf.length = 2;
1373 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1374 if (errno == EAGAIN) {
1375 // EAGAIN if we're just out of buffers to dequeue.
1376 break;
1377 }
1378 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1379 NOTIFY_ERROR(PLATFORM_FAILURE);
1380 return;
1381 }
1382 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
1383 MfcOutputRecord& output_record =
1384 mfc_output_buffer_map_[input_record.mfc_output];
1385 DCHECK(input_record.at_device);
1386 gsc_free_input_buffers_.push_back(dqbuf.index);
1387 mfc_free_output_buffers_.push_back(input_record.mfc_output);
1388 input_record.at_device = false;
1389 input_record.mfc_output = -1;
1390 output_record.input_id = -1;
1391 gsc_input_buffer_queued_count_--;
1392 }
1394 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
1395 // the client. Don't recycle to its free list yet -- we can't do that until
1396 // ReusePictureBuffer() returns it to us.
1397 while (gsc_output_buffer_queued_count_ > 0) {
1398 DCHECK(gsc_output_streamon_);
1399 memset(&dqbuf, 0, sizeof(dqbuf));
1400 memset(planes, 0, sizeof(planes));
1401 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1402 dqbuf.memory = V4L2_MEMORY_DMABUF;
1403 dqbuf.m.planes = planes;
1404 dqbuf.length = 1;
1405 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1406 if (errno == EAGAIN) {
1407 // EAGAIN if we're just out of buffers to dequeue.
1408 break;
1409 }
1410 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1411 NOTIFY_ERROR(PLATFORM_FAILURE);
1412 return;
1413 }
1414 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
1415 DCHECK(output_record.at_device);
1416 DCHECK(!output_record.at_client);
1417 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1418 output_record.at_device = false;
1419 output_record.at_client = true;
1420 gsc_output_buffer_queued_count_--;
1421 DVLOG(3) << "DequeueGsc(): returning input_id=" << dqbuf.timestamp.tv_sec
1422 << " as picture_id=" << output_record.picture_id;
1423 io_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1424 &Client::PictureReady, io_client_, media::Picture(
1425 output_record.picture_id, dqbuf.timestamp.tv_sec)));
1426 decoder_frames_at_client_++;
1427 }
1429 NotifyFlushDoneIfNeeded();
1430 }
1432 bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
1433 DVLOG(3) << "EnqueueMfcInputRecord()";
1434 DCHECK(!mfc_input_ready_queue_.empty());
1436 // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
1437 const int buffer = mfc_input_ready_queue_.back();
1438 MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
1439 DCHECK(!input_record.at_device);
1440 struct v4l2_buffer qbuf;
1441 struct v4l2_plane qbuf_plane;
1442 memset(&qbuf, 0, sizeof(qbuf));
1443 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1444 qbuf.index = buffer;
1445 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1446 qbuf.timestamp.tv_sec = input_record.input_id;
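// The bitstream input_id rides through the codec in timestamp.tv_sec: the
// driver is expected to copy the OUTPUT-side timestamp onto the matching
// CAPTURE buffer, which DequeueMfc() (and later DequeueGsc()) reads back to
// label the resulting picture.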
1447 qbuf.memory = V4L2_MEMORY_MMAP;
1448 qbuf.m.planes = &qbuf_plane;
1449 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1450 qbuf.length = 1;
1451 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1452 mfc_input_ready_queue_.pop_back();
1453 input_record.at_device = true;
1454 mfc_input_buffer_queued_count_++;
1455 DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
1456 << input_record.input_id;
1457 return true;
1458 }
1460 bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
1461 DVLOG(3) << "EnqueueMfcOutputRecord()";
1462 DCHECK(!mfc_free_output_buffers_.empty());
1464 // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
1465 const int buffer = mfc_free_output_buffers_.back();
1466 MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
1467 DCHECK(!output_record.at_device);
1468 DCHECK_EQ(output_record.input_id, -1);
1469 struct v4l2_buffer qbuf;
1470 struct v4l2_plane qbuf_planes[2];
1471 memset(&qbuf, 0, sizeof(qbuf));
1472 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1473 qbuf.index = buffer;
1474 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1475 qbuf.memory = V4L2_MEMORY_MMAP;
1476 qbuf.m.planes = qbuf_planes;
1477 qbuf.length = 2;
1478 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1479 mfc_free_output_buffers_.pop_back();
1480 output_record.at_device = true;
1481 mfc_output_buffer_queued_count_++;
1482 return true;
1483 }
1485 bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
1486 DVLOG(3) << "EnqueueGscInputRecord()";
1487 DCHECK(!gsc_free_input_buffers_.empty());
1489 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
1490 // (VIDEO_CAPTURE) buffer.
1491 const int mfc_buffer = mfc_output_gsc_input_queue_.front();
1492 const int gsc_buffer = gsc_free_input_buffers_.back();
1493 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
1494 DCHECK(!output_record.at_device);
1495 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
1496 DCHECK(!input_record.at_device);
1497 DCHECK_EQ(input_record.mfc_output, -1);
1498 struct v4l2_buffer qbuf;
1499 struct v4l2_plane qbuf_planes[2];
1500 memset(&qbuf, 0, sizeof(qbuf));
1501 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1502 qbuf.index = gsc_buffer;
1503 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1504 qbuf.timestamp.tv_sec = output_record.input_id;
1505 qbuf.memory = V4L2_MEMORY_USERPTR;
1506 qbuf.m.planes = qbuf_planes;
1507 qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
1508 qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
1509 qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
1510 qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
1511 qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
1512 qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
1513 qbuf.length = 2;
1514 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1515 mfc_output_gsc_input_queue_.pop_front();
1516 gsc_free_input_buffers_.pop_back();
1517 input_record.at_device = true;
1518 input_record.mfc_output = mfc_buffer;
1519 output_record.bytes_used[0] = 0;
1520 output_record.bytes_used[1] = 0;
1521 gsc_input_buffer_queued_count_++;
1522 DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
1523 << output_record.input_id;
1524 return true;
1525 }
1527 bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
1528 DVLOG(3) << "EnqueueGscOutputRecord()";
1529 DCHECK(!gsc_free_output_buffers_.empty());
1531 // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
1532 const int buffer = gsc_free_output_buffers_.front();
1533 GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
1534 DCHECK(!output_record.at_device);
1535 DCHECK(!output_record.at_client);
1536 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1537 TRACE_EVENT0(
1538 "Video Decoder",
1539 "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
1540 // If we have to wait for completion, wait. Note that
1541 // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
1542 // buffer that has been in the queue the longest.
1543 eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1544 EGL_FOREVER_KHR);
1545 eglDestroySyncKHR(egl_display_, output_record.egl_sync);
1546 output_record.egl_sync = EGL_NO_SYNC_KHR;
1547 }
1548 struct v4l2_buffer qbuf;
1549 struct v4l2_plane qbuf_plane;
1550 memset(&qbuf, 0, sizeof(qbuf));
1551 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1552 qbuf.index = buffer;
1553 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1554 qbuf.memory = V4L2_MEMORY_DMABUF;
1555 qbuf.m.planes = &qbuf_plane;
1556 qbuf.m.planes[0].m.fd = output_record.fd;
1557 qbuf.length = 1;
1558 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1559 gsc_free_output_buffers_.pop_front();
1560 output_record.at_device = true;
1561 gsc_output_buffer_queued_count_++;
1562 return true;
1563 }
1565 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
1566 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1567 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1568 << picture_buffer_id;
1569 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1570 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");
1572 // We run ReusePictureBufferTask even if we're in kResetting.
1573 if (decoder_state_ == kError) {
1574 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1575 return;
1576 }
1578 if (decoder_state_ == kChangingResolution) {
1579 DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1580 return;
1581 }
1583 size_t index;
1584 for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
1585 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
1586 break;
1588 if (index >= gsc_output_buffer_map_.size()) {
1589 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
1590 NOTIFY_ERROR(INVALID_ARGUMENT);
1591 return;
1592 }
1594 GscOutputRecord& output_record = gsc_output_buffer_map_[index];
1595 if (output_record.at_device || !output_record.at_client) {
1596 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1597 NOTIFY_ERROR(INVALID_ARGUMENT);
1598 return;
1599 }
1601 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1602 output_record.at_client = false;
1603 output_record.egl_sync = egl_sync_ref->egl_sync;
1604 gsc_free_output_buffers_.push_back(index);
1605 decoder_frames_at_client_--;
1606 // Take ownership of the EGLSync.
1607 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
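// EnqueueGscOutputRecord() waits on (and destroys) this sync before the
// buffer is queued back to GSC, so the device cannot overwrite a frame the
// client may still be reading.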
1608 // We got a buffer back, so kick the GSC.
1609 EnqueueGsc();
1610 }
1612 void ExynosVideoDecodeAccelerator::FlushTask() {
1613 DVLOG(3) << "FlushTask()";
1614 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1615 TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");
1617 // Flush outstanding buffers.
1618 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1619 // There's nothing in the pipe, so return done immediately.
1620 DVLOG(3) << "FlushTask(): returning flush";
1621 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1622 &Client::NotifyFlushDone, client_));
1623 return;
1624 } else if (decoder_state_ == kError) {
1625 DVLOG(2) << "FlushTask(): early out: kError state";
1626 return;
1627 }
1629 // We don't support stacked flushing.
1630 DCHECK(!decoder_flushing_);
1632 // Queue up an empty buffer -- this triggers the flush.
1633 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
1634 new BitstreamBufferRef(io_client_, io_message_loop_proxy_, NULL, 0,
1635 kFlushBufferId)));
1636 decoder_flushing_ = true;
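// The zero-length kFlushBufferId buffer travels through the decode path
// behind all queued work; NotifyFlushDoneIfNeeded() reports the flush as
// complete once every MFC and GSC queue has drained.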
1638 ScheduleDecodeBufferTaskIfNeeded();
1639 }
1641 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1642 if (!decoder_flushing_)
1643 return;
1645 // Pipeline is empty when:
1646 // * Decoder input queue is empty of non-delayed buffers.
1647 // * There is no currently filling input buffer.
1648 // * MFC input holding queue is empty.
1649 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1650 // * MFC -> GSC holding queue is empty.
1651 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
1652 if (!decoder_input_queue_.empty()) {
1653 if (decoder_input_queue_.front()->input_id !=
1654 decoder_delay_bitstream_buffer_id_)
1655 return;
1656 }
1657 if (decoder_current_input_buffer_ != -1)
1658 return;
1659 if ((mfc_input_ready_queue_.size() +
1660 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1661 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
1662 return;
1664 // TODO(posciak): crbug.com/270039. MFC requires a streamoff-streamon
1665 // sequence after flush to continue, even if we are not resetting. This would
1666 // make sense, because we don't really want to resume from a non-resume point
1667 // (e.g. not from an IDR) if we are flushed.
1668 // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1669 // could argue either way, or even say that Flush() is not needed/harmful when
1670 // transitioning to next chunk.
1671 // For now, do the streamoff-streamon cycle to satisfy MFC and not freeze when
1672 // doing MSE. This should be harmless otherwise.
1673 if (!StopDevicePoll(false))
1674 return;
1676 if (!StartDevicePoll())
1677 return;
1679 decoder_delay_bitstream_buffer_id_ = -1;
1680 decoder_flushing_ = false;
1681 DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1682 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1683 &Client::NotifyFlushDone, client_));
1685 // While we were flushing, we early-outed DecodeBufferTask()s.
1686 ScheduleDecodeBufferTaskIfNeeded();
1687 }
1689 void ExynosVideoDecodeAccelerator::ResetTask() {
1690 DVLOG(3) << "ResetTask()";
1691 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1692 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");
1694 if (decoder_state_ == kError) {
1695 DVLOG(2) << "ResetTask(): early out: kError state";
1696 return;
1697 }
1699 // If we are in the middle of switching resolutions, postpone reset until
1700 // it's done. We don't have to worry about timing of this wrt to decoding,
1701 // because MFC input pipe is already stopped if we are changing resolution.
1702 // We will come back here after we are done with the resolution change.
1703 DCHECK(!resolution_change_reset_pending_);
1704 if (resolution_change_pending_ || decoder_state_ == kChangingResolution) {
1705 resolution_change_reset_pending_ = true;
1706 return;
1707 }
1709 // We stop streaming and clear buffer tracking info (not preserving
1710 // MFC inputs).
1711 // StopDevicePoll() unconditionally does _not_ destroy buffers, however.
1712 if (!StopDevicePoll(false))
1713 return;
1715 DequeueMfcEvents();
1717 resolution_change_pending_ = false;
1718 decoder_current_bitstream_buffer_.reset();
1719 decoder_input_queue_.clear();
1721 decoder_current_input_buffer_ = -1;
1723 // If we were flushing, we'll never return any more BitstreamBuffers or
1724 // PictureBuffers; they have all been dropped and returned by now.
1725 NotifyFlushDoneIfNeeded();
1727 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1728 // jobs will early-out in the kResetting state.
1729 decoder_state_ = kResetting;
1730 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1731 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1732 }
1734 void ExynosVideoDecodeAccelerator::ResetDoneTask() {
1735 DVLOG(3) << "ResetDoneTask()";
1736 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1737 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask");
1739 if (decoder_state_ == kError) {
1740 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1741 return;
1742 }
1744 // Reset format-specific bits.
1745 if (video_profile_ >= media::H264PROFILE_MIN &&
1746 video_profile_ <= media::H264PROFILE_MAX) {
1747 decoder_h264_parser_.reset(new content::H264Parser());
1748 }
1750 // Jobs drained, we're finished resetting.
1751 DCHECK_EQ(decoder_state_, kResetting);
1752 decoder_state_ = kAfterReset;
1753 decoder_partial_frame_pending_ = false;
1754 decoder_delay_bitstream_buffer_id_ = -1;
1755 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1756 &Client::NotifyResetDone, client_));
1758 // While we were resetting, we early-outed DecodeBufferTask()s.
1759 ScheduleDecodeBufferTaskIfNeeded();
1760 }
1762 void ExynosVideoDecodeAccelerator::DestroyTask() {
1763 DVLOG(3) << "DestroyTask()";
1764 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask");
1766 // DestroyTask() should run regardless of decoder_state_.
1768 // Stop streaming and the device_poll_thread_.
1769 StopDevicePoll(false);
1771 decoder_current_bitstream_buffer_.reset();
1772 decoder_current_input_buffer_ = -1;
1773 decoder_decode_buffer_tasks_scheduled_ = 0;
1774 decoder_frames_at_client_ = 0;
1775 decoder_input_queue_.clear();
1776 decoder_flushing_ = false;
1778 // Set our state to kError. Just in case.
1779 decoder_state_ = kError;
1780 }
1782 bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
1783 DVLOG(3) << "StartDevicePoll()";
1784 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1785 DCHECK(!device_poll_thread_.IsRunning());
1787 // Start up the device poll thread and schedule its first DevicePollTask().
1788 if (!device_poll_thread_.Start()) {
1789 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1790 NOTIFY_ERROR(PLATFORM_FAILURE);
1791 return false;
1792 }
1793 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1794 &ExynosVideoDecodeAccelerator::DevicePollTask,
1795 base::Unretained(this),
1796 0));
1798 return true;
1799 }
1801 bool ExynosVideoDecodeAccelerator::StopDevicePoll(bool keep_mfc_input_state) {
1802 DVLOG(3) << "StopDevicePoll()";
1803 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1805 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1806 if (!SetDevicePollInterrupt())
1807 return false;
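// The eventfd write above wakes the blocking poll() in DevicePollTask(), so
// Stop() can join the poll thread without waiting for device activity.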
1808 device_poll_thread_.Stop();
1809 // Clear the interrupt now, to be sure.
1810 if (!ClearDevicePollInterrupt())
1811 return false;
1813 // Stop streaming.
1814 if (!keep_mfc_input_state) {
1815 if (mfc_input_streamon_) {
1816 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1817 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1818 }
1819 mfc_input_streamon_ = false;
1820 }
1821 if (mfc_output_streamon_) {
1822 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1823 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1824 }
1825 mfc_output_streamon_ = false;
1826 if (gsc_input_streamon_) {
1827 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1828 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1829 }
1830 gsc_input_streamon_ = false;
1831 if (gsc_output_streamon_) {
1832 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1833 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1834 }
1835 gsc_output_streamon_ = false;
1837 // Reset all our accounting info.
1838 if (!keep_mfc_input_state) {
1839 mfc_input_ready_queue_.clear();
1840 mfc_free_input_buffers_.clear();
1841 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1842 mfc_free_input_buffers_.push_back(i);
1843 mfc_input_buffer_map_[i].at_device = false;
1844 mfc_input_buffer_map_[i].bytes_used = 0;
1845 mfc_input_buffer_map_[i].input_id = -1;
1846 }
1847 mfc_input_buffer_queued_count_ = 0;
1848 }
1849 mfc_free_output_buffers_.clear();
1850 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1851 mfc_free_output_buffers_.push_back(i);
1852 mfc_output_buffer_map_[i].at_device = false;
1853 mfc_output_buffer_map_[i].input_id = -1;
1854 }
1855 mfc_output_buffer_queued_count_ = 0;
1856 mfc_output_gsc_input_queue_.clear();
1857 gsc_free_input_buffers_.clear();
1858 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1859 gsc_free_input_buffers_.push_back(i);
1860 gsc_input_buffer_map_[i].at_device = false;
1861 gsc_input_buffer_map_[i].mfc_output = -1;
1862 }
1863 gsc_input_buffer_queued_count_ = 0;
1864 gsc_free_output_buffers_.clear();
1865 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1866 // Only mark those free that aren't being held by the VDA.
1867 if (!gsc_output_buffer_map_[i].at_client) {
1868 gsc_free_output_buffers_.push_back(i);
1869 gsc_output_buffer_map_[i].at_device = false;
1870 }
1871 }
1872 gsc_output_buffer_queued_count_ = 0;
1874 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1875 return true;
1876 }
1878 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1879 DVLOG(3) << "SetDevicePollInterrupt()";
1880 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1882 const uint64 buf = 1;
1883 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1884 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1885 NOTIFY_ERROR(PLATFORM_FAILURE);
1886 return false;
1887 }
1888 return true;
1889 }
1891 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1892 DVLOG(3) << "ClearDevicePollInterrupt()";
1893 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1895 uint64 buf;
1896 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1897 if (errno == EAGAIN) {
1898 // No interrupt flag set, and we're reading nonblocking. Not an error.
1899 return true;
1900 } else {
1901 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
1902 NOTIFY_ERROR(PLATFORM_FAILURE);
1903 return false;
1904 }
1905 }
1906 return true;
1907 }
1909 void ExynosVideoDecodeAccelerator::StartResolutionChangeIfNeeded() {
1910 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1911 DCHECK_EQ(decoder_state_, kDecoding);
1913 if (!resolution_change_pending_)
1914 return;
1916 if (!mfc_output_gsc_input_queue_.empty() ||
1917 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_ > 0) {
1918 DVLOG(3) << "StartResolutionChangeIfNeeded(): waiting for GSC to finish.";
1919 return;
1920 }
1922 DVLOG(3) << "No more work for GSC, initiate resolution change";
1924 // Keep MFC input queue.
1925 if (!StopDevicePoll(true))
1926 return;
1928 decoder_state_ = kChangingResolution;
1929 DCHECK(resolution_change_pending_);
1930 resolution_change_pending_ = false;
1932 // Post a task to clean up buffers on child thread. This will also ensure
1933 // that we won't accept ReusePictureBuffer() anymore after that.
1934 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1935 &ExynosVideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
1936 weak_this_));
1937 }
1939 void ExynosVideoDecodeAccelerator::FinishResolutionChange() {
1940 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1941 DVLOG(3) << "FinishResolutionChange()";
1943 if (decoder_state_ == kError) {
1944 DVLOG(2) << "FinishResolutionChange(): early out: kError state";
1945 return;
1946 }
1948 struct v4l2_format format;
1949 bool again;
1950 bool ret = GetFormatInfo(&format, &again);
1951 if (!ret || again) {
1952 DVLOG(3) << "Couldn't get format information after resolution change";
1953 NOTIFY_ERROR(PLATFORM_FAILURE);
1954 return;
1955 }
1957 if (!CreateBuffersForFormat(format)) {
1958 DVLOG(3) << "Couldn't reallocate buffers after resolution change";
1959 NOTIFY_ERROR(PLATFORM_FAILURE);
1960 return;
1961 }
1963 // From here we stay in kChangingResolution and wait for
1964 // AssignPictureBuffers() before we can resume.
1965 }
1967 void ExynosVideoDecodeAccelerator::ResumeAfterResolutionChange() {
1968 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1969 DVLOG(3) << "ResumeAfterResolutionChange()";
1971 decoder_state_ = kDecoding;
1973 if (resolution_change_reset_pending_) {
1974 resolution_change_reset_pending_ = false;
1975 ResetTask();
1976 return;
1977 }
1979 if (!StartDevicePoll())
1980 return;
1982 EnqueueMfc();
1983 // Gsc will get enqueued in AssignPictureBuffersTask().
1984 ScheduleDecodeBufferTaskIfNeeded();
1985 }
1987 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
1988 DVLOG(3) << "DevicePollTask()";
1989 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1990 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask");
1992 // This routine just polls the set of device fds, and schedules a
1993 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur.
1994 // Other threads may notify this task to return early by writing to
1995 // device_poll_interrupt_fd_.
1996 struct pollfd pollfds[3];
1997 nfds_t nfds;
1998 int mfc_pollfd = -1;
2000 // Add device_poll_interrupt_fd_.
2001 pollfds[0].fd = device_poll_interrupt_fd_;
2002 pollfds[0].events = POLLIN | POLLERR;
2003 nfds = 1;
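// device_poll_interrupt_fd_ is an eventfd: a write from another thread makes
// it readable, waking this poll() so the task can return early.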
2005 if (poll_fds & kPollMfc) {
2006 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
2007 pollfds[nfds].fd = mfc_fd_;
2008 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR | POLLPRI;
2009 mfc_pollfd = nfds;
2010 nfds++;
2011 }
2012 // Add GSC fd, if we should poll on it.
2013 // GSC has to wait until both input and output buffers are queued.
2014 if (poll_fds & kPollGsc) {
2015 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
2016 pollfds[nfds].fd = gsc_fd_;
2017 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
2018 nfds++;
2019 }
2021 // Poll it!
2022 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) {
2023 DPLOG(ERROR) << "DevicePollTask(): poll() failed";
2024 NOTIFY_ERROR(PLATFORM_FAILURE);
2025 return;
2026 }
2028 bool mfc_event_pending = (mfc_pollfd != -1 &&
2029 pollfds[mfc_pollfd].revents & POLLPRI);
2031 // All processing should happen on ServiceDeviceTask(), since we shouldn't
2032 // touch decoder state from this thread.
2033 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
2034 &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
2035 base::Unretained(this), mfc_event_pending));
2036 }
2038 void ExynosVideoDecodeAccelerator::NotifyError(Error error) {
2039 DVLOG(2) << "NotifyError()";
2041 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
2042 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2043 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error));
2044 return;
2045 }
2047 if (client_) {
2048 client_->NotifyError(error);
2049 client_ptr_factory_.InvalidateWeakPtrs();
2050 }
2051 }
2053 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) {
2054 DVLOG(3) << "SetDecoderState(): state=" << state;
2056 // We can touch decoder_state_ only if this is the decoder thread or the
2057 // decoder thread isn't running.
2058 if (decoder_thread_.message_loop() != NULL &&
2059 decoder_thread_.message_loop() != base::MessageLoop::current()) {
2060 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
2061 &ExynosVideoDecodeAccelerator::SetDecoderState,
2062 base::Unretained(this), state));
2063 } else {
2064 decoder_state_ = state;
2065 }
2066 }
2068 bool ExynosVideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
2069 bool* again) {
2070 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2072 *again = false;
2073 memset(format, 0, sizeof(*format));
2074 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2075 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_G_FMT, format)) != 0) {
2076 if (errno == EINVAL) {
2077 // EINVAL means we haven't seen sufficient stream to decode the format.
2078 *again = true;
2079 return true;
2080 } else {
2081 DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT";
2082 NOTIFY_ERROR(PLATFORM_FAILURE);
2083 return false;
2084 }
2085 }
2087 return true;
2088 }
2090 bool ExynosVideoDecodeAccelerator::CreateBuffersForFormat(
2091 const struct v4l2_format& format) {
2092 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2093 CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
2094 frame_buffer_size_.SetSize(
2095 format.fmt.pix_mp.width, format.fmt.pix_mp.height);
2096 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
2097 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
2098 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
2099 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);
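// For NV12MT_16X16, plane 0 is the tiled luma (Y) plane and plane 1 the
// interleaved chroma (CbCr) plane, hence the per-plane size bookkeeping.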
2100 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
2101 << frame_buffer_size_.ToString();
2103 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
2104 !CreateGscOutputBuffers())
2105 return false;
2107 return true;
2108 }
2110 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
2111 DVLOG(3) << "CreateMfcInputBuffers()";
2112 // We always run this as we prepare to initialize.
2113 DCHECK_EQ(decoder_state_, kUninitialized);
2114 DCHECK(!mfc_input_streamon_);
2115 DCHECK(mfc_input_buffer_map_.empty());
2117 __u32 pixelformat = 0;
2118 if (video_profile_ >= media::H264PROFILE_MIN &&
2119 video_profile_ <= media::H264PROFILE_MAX) {
2120 pixelformat = V4L2_PIX_FMT_H264;
2121 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
2122 video_profile_ <= media::VP8PROFILE_MAX) {
2123 pixelformat = V4L2_PIX_FMT_VP8;
2124 } else {
2125 NOTREACHED();
2126 }
2128 struct v4l2_format format;
2129 memset(&format, 0, sizeof(format));
2130 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2131 format.fmt.pix_mp.pixelformat = pixelformat;
2132 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
2133 format.fmt.pix_mp.num_planes = 1;
2134 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
2136 struct v4l2_requestbuffers reqbufs;
2137 memset(&reqbufs, 0, sizeof(reqbufs));
2138 reqbufs.count = kMfcInputBufferCount;
2139 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2140 reqbufs.memory = V4L2_MEMORY_MMAP;
2141 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2142 mfc_input_buffer_map_.resize(reqbufs.count);
2143 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2144 mfc_free_input_buffers_.push_back(i);
2146 // Query for the MEMORY_MMAP pointer.
2147 struct v4l2_plane planes[1];
2148 struct v4l2_buffer buffer;
2149 memset(&buffer, 0, sizeof(buffer));
2150 memset(planes, 0, sizeof(planes));
2151 buffer.index = i;
2152 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2153 buffer.memory = V4L2_MEMORY_MMAP;
2154 buffer.m.planes = planes;
2155 buffer.length = 1;
2156 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2157 void* address = mmap(NULL, buffer.m.planes[0].length,
2158 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2159 buffer.m.planes[0].m.mem_offset);
2160 if (address == MAP_FAILED) {
2161 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
2162 return false;
2163 }
2164 mfc_input_buffer_map_[i].address = address;
2165 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
2166 }
2168 return true;
2169 }
2171 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
2172 DVLOG(3) << "CreateMfcOutputBuffers()";
2173 DCHECK(decoder_state_ == kInitialized ||
2174 decoder_state_ == kChangingResolution);
2175 DCHECK(!mfc_output_streamon_);
2176 DCHECK(mfc_output_buffer_map_.empty());
2178 // Number of MFC output buffers we need.
2179 struct v4l2_control ctrl;
2180 memset(&ctrl, 0, sizeof(ctrl));
2181 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2182 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
2183 mfc_output_dpb_size_ = ctrl.value;
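// The driver reports the minimum number of CAPTURE buffers it needs for the
// stream's decoded picture buffer (DPB); kDpbOutputBufferExtraCount more are
// requested below, giving the pipeline headroom while some buffers are held
// downstream.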
2185 // Output format setup in Initialize().
2187 // Allocate the output buffers.
2188 struct v4l2_requestbuffers reqbufs;
2189 memset(&reqbufs, 0, sizeof(reqbufs));
2190 reqbufs.count = mfc_output_dpb_size_ + kDpbOutputBufferExtraCount;
2191 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2192 reqbufs.memory = V4L2_MEMORY_MMAP;
2193 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2195 // Fill our free-buffers list, and create DMABUFs from them.
2196 mfc_output_buffer_map_.resize(reqbufs.count);
2197 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2198 mfc_free_output_buffers_.push_back(i);
2200 // Query for the MEMORY_MMAP pointer.
2201 struct v4l2_plane planes[2];
2202 struct v4l2_buffer buffer;
2203 memset(&buffer, 0, sizeof(buffer));
2204 memset(planes, 0, sizeof(planes));
2205 buffer.index = i;
2206 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2207 buffer.memory = V4L2_MEMORY_MMAP;
2208 buffer.m.planes = planes;
2209 buffer.length = 2;
2210 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2212 // Get their user memory for GSC input.
2213 for (int j = 0; j < 2; ++j) {
2214 void* address = mmap(NULL, buffer.m.planes[j].length,
2215 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2216 buffer.m.planes[j].m.mem_offset);
2217 if (address == MAP_FAILED) {
2218 DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
2219 return false;
2220 }
2221 mfc_output_buffer_map_[i].address[j] = address;
2222 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2223 }
2224 }
2226 return true;
2227 }
2229 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2230 DVLOG(3) << "CreateGscInputBuffers()";
2231 DCHECK(decoder_state_ == kInitialized ||
2232 decoder_state_ == kChangingResolution);
2233 DCHECK(!gsc_input_streamon_);
2234 DCHECK(gsc_input_buffer_map_.empty());
2236 struct v4l2_format format;
2237 memset(&format, 0, sizeof(format));
2238 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2239 format.fmt.pix_mp.width = frame_buffer_size_.width();
2240 format.fmt.pix_mp.height = frame_buffer_size_.height();
2241 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2242 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2243 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2244 // NV12MT_16X16 is a tiled format for which bytesperline doesn't make too much
2245 // sense. Convention seems to be to assume 8bpp for these tiled formats.
2246 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2247 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2248 format.fmt.pix_mp.num_planes = 2;
2249 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2251 struct v4l2_control control;
2252 memset(&control, 0, sizeof(control));
2253 control.id = V4L2_CID_ROTATE;
2254 control.value = 0;
2255 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2257 memset(&control, 0, sizeof(control));
2258 control.id = V4L2_CID_HFLIP;
2259 control.value = 0;
2260 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2262 memset(&control, 0, sizeof(control));
2263 control.id = V4L2_CID_VFLIP;
2264 control.value = 0;
2265 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2267 memset(&control, 0, sizeof(control));
2268 control.id = V4L2_CID_GLOBAL_ALPHA;
2269 control.value = 255;
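// Try V4L2_CID_GLOBAL_ALPHA first; if the driver rejects it, fall back to the
// standard V4L2_CID_ALPHA_COMPONENT control below.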
2270 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_S_CTRL, &control)) != 0) {
2271 memset(&control, 0, sizeof(control));
2272 control.id = V4L2_CID_ALPHA_COMPONENT;
2273 control.value = 255;
2274 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2275 }
2277 struct v4l2_requestbuffers reqbufs;
2278 memset(&reqbufs, 0, sizeof(reqbufs));
2279 reqbufs.count = kGscInputBufferCount;
2280 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2281 reqbufs.memory = V4L2_MEMORY_USERPTR;
2282 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2284 gsc_input_buffer_map_.resize(reqbufs.count);
2285 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
2286 gsc_free_input_buffers_.push_back(i);
2287 gsc_input_buffer_map_[i].mfc_output = -1;
2288 }
2290 return true;
2291 }
2293 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2294 DVLOG(3) << "CreateGscOutputBuffers()";
2295 DCHECK(decoder_state_ == kInitialized ||
2296 decoder_state_ == kChangingResolution);
2297 DCHECK(!gsc_output_streamon_);
2298 DCHECK(gsc_output_buffer_map_.empty());
2300 // GSC outputs into the EGLImages we create from the textures we are
2301 // assigned. Assume RGBA8888 format.
2302 struct v4l2_format format;
2303 memset(&format, 0, sizeof(format));
2304 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2305 format.fmt.pix_mp.width = frame_buffer_size_.width();
2306 format.fmt.pix_mp.height = frame_buffer_size_.height();
2307 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
2308 format.fmt.pix_mp.plane_fmt[0].sizeimage =
2309 frame_buffer_size_.width() * frame_buffer_size_.height() * 4;
2310 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4;
2311 format.fmt.pix_mp.num_planes = 1;
2312 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2314 struct v4l2_requestbuffers reqbufs;
2315 memset(&reqbufs, 0, sizeof(reqbufs));
2316 reqbufs.count = mfc_output_dpb_size_ + kDpbOutputBufferExtraCount;
2317 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2318 reqbufs.memory = V4L2_MEMORY_DMABUF;
2319 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2321 // We don't actually fill in the freelist or the map here. That happens once
2322 // we have actual usable buffers, after AssignPictureBuffers().
2323 gsc_output_buffer_map_.resize(reqbufs.count);
2325 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): "
2326 << "buffer_count=" << gsc_output_buffer_map_.size()
2327 << ", width=" << frame_buffer_size_.width()
2328 << ", height=" << frame_buffer_size_.height();
2329 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2330 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_map_.size(),
2331 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()),
2332 GL_TEXTURE_2D));
2334 return true;
2335 }
2337 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() {
2338 DVLOG(3) << "DestroyMfcInputBuffers()";
2339 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2340 DCHECK(!mfc_input_streamon_);
2342 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2343 if (mfc_input_buffer_map_[i].address != NULL) {
2344 munmap(mfc_input_buffer_map_[i].address,
2345 mfc_input_buffer_map_[i].length);
2346 }
2347 }
2349 struct v4l2_requestbuffers reqbufs;
2350 memset(&reqbufs, 0, sizeof(reqbufs));
2351 reqbufs.count = 0;
2352 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2353 reqbufs.memory = V4L2_MEMORY_MMAP;
2354 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2355 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2357 mfc_input_buffer_map_.clear();
2358 mfc_free_input_buffers_.clear();
2359 }
2361 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() {
2362 DVLOG(3) << "DestroyMfcOutputBuffers()";
2363 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2364 DCHECK(!mfc_output_streamon_);
2366 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2367 if (mfc_output_buffer_map_[i].address[0] != NULL)
2368 munmap(mfc_output_buffer_map_[i].address[0],
2369 mfc_output_buffer_map_[i].length[0]);
2370 if (mfc_output_buffer_map_[i].address[1] != NULL)
2371 munmap(mfc_output_buffer_map_[i].address[1],
2372 mfc_output_buffer_map_[i].length[1]);
2373 }
2375 struct v4l2_requestbuffers reqbufs;
2376 memset(&reqbufs, 0, sizeof(reqbufs));
2377 reqbufs.count = 0;
2378 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2379 reqbufs.memory = V4L2_MEMORY_MMAP;
2380 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2381 DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2383 mfc_output_buffer_map_.clear();
2384 mfc_free_output_buffers_.clear();
2385 }
2387 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() {
2388 DVLOG(3) << "DestroyGscInputBuffers()";
2389 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2390 DCHECK(!gsc_input_streamon_);
2392 struct v4l2_requestbuffers reqbufs;
2393 memset(&reqbufs, 0, sizeof(reqbufs));
2394 reqbufs.count = 0;
2395 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2396 reqbufs.memory = V4L2_MEMORY_DMABUF;
2397 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2398 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2400 gsc_input_buffer_map_.clear();
2401 gsc_free_input_buffers_.clear();
2402 }
2404 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() {
2405 DVLOG(3) << "DestroyGscOutputBuffers()";
2406 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2407 DCHECK(!gsc_output_streamon_);
2409 if (gsc_output_buffer_map_.size() != 0) {
2410 if (!make_context_current_.Run())
2411 DLOG(ERROR) << "DestroyGscOutputBuffers(): "
2412 << "could not make context current";
2414 size_t i = 0;
2415 do {
2416 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
2417 if (output_record.fd != -1)
2418 HANDLE_EINTR(close(output_record.fd));
2419 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
2420 eglDestroyImageKHR(egl_display_, output_record.egl_image);
2421 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
2422 eglDestroySyncKHR(egl_display_, output_record.egl_sync);
2423 if (client_) {
2424 DVLOG(1) << "DestroyGscOutputBuffers(): "
2425 << "dismissing PictureBuffer id=" << output_record.picture_id;
2426 client_->DismissPictureBuffer(output_record.picture_id);
2427 }
2428 ++i;
2429 } while (i < gsc_output_buffer_map_.size());
2430 }
2432 struct v4l2_requestbuffers reqbufs;
2433 memset(&reqbufs, 0, sizeof(reqbufs));
2434 reqbufs.count = 0;
2435 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2436 reqbufs.memory = V4L2_MEMORY_DMABUF;
2437 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2438 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2440 gsc_output_buffer_map_.clear();
2441 gsc_free_output_buffers_.clear();
2442 }
2444 void ExynosVideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
2445 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2446 DVLOG(3) << "ResolutionChangeDestroyBuffers()";
2448 DestroyGscInputBuffers();
2449 DestroyGscOutputBuffers();
2450 DestroyMfcOutputBuffers();
2452 // Finish resolution change on decoder thread.
2453 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
2454 &ExynosVideoDecodeAccelerator::FinishResolutionChange,
2455 base::Unretained(this)));
2456 }
2458 } // namespace content