// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;
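
// Bitstream buffer ids live in a 30-bit space: they wrap from ID_LAST back to
// 0, ID_HALF is the midpoint used to order ids across the wrap, and
// ID_INVALID means "no reset has happened yet". See IsBufferAfterReset() and
// IsFirstBufferAfterReset() below.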

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate. Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for shared
// memory: 10 seconds at 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}
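
// Note: SHMBuffer objects are pooled in |available_shm_segments_| and recycled
// through GetSHM_Locked()/PutSHM_Locked() below, so at most
// kMaxNumSharedMemorySegments segments are ever allocated.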

RTCVideoDecoder::RTCVideoDecoder(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : video_codec_type_(type),
      factories_(factories),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DestroyVDA();

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

// static
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_ANY;
      break;
    case webrtc::kVideoCodecH264:
      profile = media::H264PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  // Create the VDA synchronously on the |factories| task runner.
  base::WaitableEvent waiter(true, false);
  decoder.reset(new RTCVideoDecoder(type, factories));
  decoder->factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::CreateVDA,
                 base::Unretained(decoder.get()),
                 profile,
                 &waiter));
  waiter.Wait();
  // vda can be NULL if the codec is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(video_codec_type_, codecSettings->codecType);
  if (codecSettings->codecType == webrtc::kVideoCodecVP8 &&
      codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   kMaxNumSharedMemorySegments,
                   kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally. Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do that
  // here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::RequestBufferDecode,
                 weak_factory_.GetWeakPtr()));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

int32_t RTCVideoDecoder::Reset() {
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
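  // |reset_bitstream_buffer_id_| now records the last bitstream buffer id
  // handed out before this Reset; IsBufferAfterReset() and
  // IsFirstBufferAfterReset() use it to drop stale buffers and to require a
  // key frame after the reset.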
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::ResetInternal,
                   weak_factory_.GetWeakPtr()));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  if (!picture_buffers_at_display_.count(id)) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
    return;
  }
  // Not destroying a texture in display in |picture_buffers_at_display_|.
  // Postpone deletion until after it's returned to us.
}

void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Validate picture rectangle from GPU.
  if (picture.visible_rect().IsEmpty() ||
      !gfx::Rect(pb.size()).Contains(picture.visible_rect())) {
    NOTREACHED() << "Invalid picture size from VDA: "
                 << picture.visible_rect().ToString() << " should fit in "
                 << pb.size().ToString();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }

  // Create a media::VideoFrame.
  uint32_t timestamp = 0;
  GetBufferData(picture.bitstream_buffer_id(), &timestamp);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp);
  bool inserted =
      picture_buffers_at_display_.insert(std::make_pair(
          picture.picture_buffer_id(),
          pb.texture_id())).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle,
                                          picture.visible_rect().width(),
                                          picture.visible_rect().height(),
                                          timestamp,
                                          0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

static void ReadPixelsSyncInner(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    uint32 texture_id,
    const gfx::Rect& visible_rect,
    const SkBitmap& pixels,
    base::WaitableEvent* event) {
  factories->ReadPixels(texture_id, visible_rect, pixels);
  event->Signal();
}

static void ReadPixelsSync(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    uint32 texture_id,
    const gfx::Rect& visible_rect,
    const SkBitmap& pixels) {
  base::WaitableEvent event(true, false);
  if (!factories->GetTaskRunner()->PostTask(
          FROM_HERE,
          base::Bind(&ReadPixelsSyncInner,
                     factories,
                     texture_id,
                     visible_rect,
                     pixels,
                     &event)))
    return;
  event.Wait();
}

scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp) {
  gfx::Rect visible_rect(picture.visible_rect());
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
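  // For example, a 90 kHz RTP timestamp of 90000 (one second of media time)
  // becomes 90000 * 1000 / 90 = 1,000,000 internal (microsecond) units.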
  return media::VideoFrame::WrapNativeTexture(
      make_scoped_ptr(new gpu::MailboxHolder(
          pb.texture_mailbox(), decoder_texture_target_, 0)),
      media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReleaseMailbox,
                                          weak_factory_.GetWeakPtr(),
                                          factories_,
                                          picture.picture_buffer_id(),
                                          pb.texture_id())),
      frame_size_,
      visible_rect,
      visible_rect.size(),
      timestamp_ms,
      base::Bind(&ReadPixelsSync, factories_, pb.texture_id(), visible_rect));
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::RequestBufferDecode() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop the buffers before Reset or Release is called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}
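
// For example, after a reset at id_reset = ID_LAST the next buffer id wraps
// to 0: diff = 0 - ID_LAST is adjusted by ID_LAST + 1 to 1, which is less
// than ID_HALF, so the buffer is correctly treated as coming after the reset,
// and IsFirstBufferAfterReset(0, ID_LAST) is true.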

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Queued too many buffers. Something has gone wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

// static
void RTCVideoDecoder::ReleaseMailbox(
    base::WeakPtr<RTCVideoDecoder> decoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    int64 picture_buffer_id,
    uint32 texture_id,
    uint32 release_sync_point) {
  DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
  factories->WaitSyncPoint(release_sync_point);

  if (decoder) {
    decoder->ReusePictureBuffer(picture_buffer_id);
    return;
  }
  // It's the last chance to delete the texture after display,
  // because RTCVideoDecoder was destructed.
  factories->DeleteTexture(texture_id);
}

void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  DCHECK(!picture_buffers_at_display_.empty());
  PictureBufferTextureMap::iterator display_iterator =
      picture_buffers_at_display_.find(picture_buffer_id);
  DCHECK(display_iterator != picture_buffers_at_display_.end());
  uint32 texture_id = display_iterator->second;
  picture_buffers_at_display_.erase(display_iterator);

  if (!assigned_picture_buffers_.count(picture_buffer_id)) {
    // This picture was dismissed while in display, so we postponed deletion.
    factories_->DeleteTexture(texture_id);
    return;
  }

  // DestroyVDA() might already have been called.
  if (vda_)
    vda_->ReusePictureBuffer(picture_buffer_id);
}

void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
                                base::WaitableEvent* waiter) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  vda_ = factories_->CreateVideoDecodeAccelerator();
  if (vda_ && !vda_->Initialize(profile, this))
    vda_.release()->Destroy();
  waiter->Signal();
}

void RTCVideoDecoder::DestroyTextures() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
  // their textures may still be in use by the user of this RTCVideoDecoder.
  for (PictureBufferTextureMap::iterator it =
           picture_buffers_at_display_.begin();
       it != picture_buffers_at_display_.end();
       ++it) {
    assigned_picture_buffers_.erase(it->first);
  }

  for (std::map<int32, media::PictureBuffer>::iterator it =
           assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   1,
                   min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos. The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest entry and the
  // least likely to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging a boolean is enough to know if HW decoding has been used. Also,
  // InitDecode is less likely to return an error so an enum is not used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK);
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
    const {
  DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
}

}  // namespace content