// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/metrics/histogram.h"
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/task_runner_util.h"
#include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/renderers/gpu_video_accelerator_factories.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/webrtc/base/bind.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"
#include "third_party/webrtc/video_frame.h"

namespace content {

const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Number of allocated shared memory segments.
static const size_t kNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for shared memory.
static const size_t kMaxNumOfPendingBuffers = 8;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(scoped_ptr<base::SharedMemory> shm, size_t size);
  ~SHMBuffer();
  scoped_ptr<base::SharedMemory> const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(scoped_ptr<base::SharedMemory> shm,
                                      size_t size)
    : shm(shm.Pass()), size(size) {
}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() {
}

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

RTCVideoDecoder::RTCVideoDecoder(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : video_codec_type_(type),
      factories_(factories),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DestroyVDA();

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();
  ClearPendingBuffers();
}

// static
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_ANY;
      break;
    case webrtc::kVideoCodecH264:
      profile = media::H264PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  base::WaitableEvent waiter(true, false);
  decoder.reset(new RTCVideoDecoder(type, factories));
  decoder->factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::CreateVDA,
                 base::Unretained(decoder.get()), profile, &waiter));
  waiter.Wait();
  // vda can be NULL if the codec is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(video_codec_type_, codecSettings->codecType);
  if (codecSettings->codecType == webrtc::kVideoCodecVP8 &&
      codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }

  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally. Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do that
  // here.
#if defined(OS_ANDROID)
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    gfx::Size new_frame_size(inputImage._encodedWidth,
                             inputImage._encodedHeight);
    DVLOG(2) << "Got key frame. size=" << new_frame_size.ToString();

    if (new_frame_size.width() > max_resolution_.width() ||
        new_frame_size.width() < min_resolution_.width() ||
        new_frame_size.height() > max_resolution_.height() ||
        new_frame_size.height() < min_resolution_.height()) {
      DVLOG(1) << "Resolution unsupported, falling back to software decode";
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }

    gfx::Size prev_frame_size = frame_size_;
    frame_size_ = new_frame_size;
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.empty())
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data)) {
      // We have exceeded the pending buffer count and are severely behind.
      // Since we are returning ERROR, WebRTC will not be interested in the
      // remaining buffers, and will provide us with a new keyframe instead.
      // Better to drop any pending buffers and start afresh to catch up faster.
      DVLOG(1) << "Exceeded maximum pending buffer count, dropping";
      ClearPendingBuffers();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::RequestBufferDecode,
                 weak_factory_.GetWeakPtr()));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::ResetInternal,
                   weak_factory_.GetWeakPtr()));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  if (!picture_buffers_at_display_.count(id)) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
    return;
  }
  // Not destroying a texture in display in |picture_buffers_at_display_|.
  // Postpone deletion until after it's returned to us.
}

void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Validate picture rectangle from GPU.
  if (picture.visible_rect().IsEmpty() ||
      !gfx::Rect(pb.size()).Contains(picture.visible_rect())) {
    NOTREACHED() << "Invalid picture size from VDA: "
                 << picture.visible_rect().ToString() << " should fit in "
                 << pb.size().ToString();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }

  // Create a media::VideoFrame.
  uint32_t timestamp = 0;
  GetBufferData(picture.bitstream_buffer_id(), &timestamp);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp);
  bool inserted =
      picture_buffers_at_display_.insert(std::make_pair(
          picture.picture_buffer_id(),
          pb.texture_id())).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::VideoFrame decoded_image(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0,
      webrtc::kVideoRotation_0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp) {
  gfx::Rect visible_rect(picture.visible_rect());
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
  // TODO(mcasas): The incoming data is actually a YUV format, but is labelled
  // as ARGB. This prevents the compositor from messing with it, since the
  // underlying platform can handle the former format natively. Make sure the
  // correct format is used and everyone down the line understands it.
  scoped_refptr<media::VideoFrame> frame(media::VideoFrame::WrapNativeTexture(
      media::PIXEL_FORMAT_ARGB,
      gpu::MailboxHolder(pb.texture_mailbox(), decoder_texture_target_, 0),
      media::BindToCurrentLoop(base::Bind(
          &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(),
          factories_, picture.picture_buffer_id(), pb.texture_id())),
      pb.size(), visible_rect, visible_rect.size(), timestamp_ms));
  if (picture.allow_overlay()) {
    frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY,
                                  true);
  }
  return frame;
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }

  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::RequestBufferDecode() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.empty() || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop the buffers before Reset or Release is called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

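// Bitstream buffer ids live in a 30-bit circular space (see ID_LAST), so
// "after" is decided modulo wraparound: a buffer counts as newer than the
// reset point when it lies less than half the id space ahead of it.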
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Queued too many buffers. Something went wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

// static
void RTCVideoDecoder::ReleaseMailbox(
    base::WeakPtr<RTCVideoDecoder> decoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    int64 picture_buffer_id,
    uint32 texture_id,
    uint32 release_sync_point) {
  DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
  factories->WaitSyncPoint(release_sync_point);

  if (decoder) {
    decoder->ReusePictureBuffer(picture_buffer_id);
    return;
  }
  // It's the last chance to delete the texture after display,
  // because the RTCVideoDecoder has been destroyed.
  factories->DeleteTexture(texture_id);
}

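// Returns a picture buffer to the VDA once its texture is no longer being
// displayed; if the buffer was dismissed while in display, its texture is
// deleted here instead.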
void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  DCHECK(!picture_buffers_at_display_.empty());
  PictureBufferTextureMap::iterator display_iterator =
      picture_buffers_at_display_.find(picture_buffer_id);
  DCHECK(display_iterator != picture_buffers_at_display_.end());
  uint32 texture_id = display_iterator->second;
  picture_buffers_at_display_.erase(display_iterator);

  if (!assigned_picture_buffers_.count(picture_buffer_id)) {
    // This picture was dismissed while in display, so we postponed deletion.
    factories_->DeleteTexture(texture_id);
    return;
  }

  // DestroyVDA() might already have been called.
  if (vda_)
    vda_->ReusePictureBuffer(picture_buffer_id);
}

bool RTCVideoDecoder::IsProfileSupported(media::VideoCodecProfile profile) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  media::VideoDecodeAccelerator::SupportedProfiles supported_profiles =
      factories_->GetVideoDecodeAcceleratorSupportedProfiles();

  for (const auto& supported_profile : supported_profiles) {
    if (profile == supported_profile.profile) {
      min_resolution_ = supported_profile.min_resolution;
      max_resolution_ = supported_profile.max_resolution;
      return true;
    }
  }
  return false;
}

void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
                                base::WaitableEvent* waiter) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  if (!IsProfileSupported(profile)) {
    DVLOG(1) << "Unsupported profile " << profile;
  } else {
    vda_ = factories_->CreateVideoDecodeAccelerator();
    if (vda_ && !vda_->Initialize(profile, this))
      vda_.release()->Destroy();
  }
  waiter->Signal();
}

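// Deletes the textures of all assigned picture buffers except those still out
// for display; those are dropped from the assigned set here and deleted later,
// when ReleaseMailbox()/ReusePictureBuffer() hands them back.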
void RTCVideoDecoder::DestroyTextures() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
  // their textures may still be in use by the user of this RTCVideoDecoder.
  for (PictureBufferTextureMap::iterator it =
           picture_buffers_at_display_.begin();
       it != picture_buffers_at_display_.end();
       ++it) {
    assigned_picture_buffers_.erase(it->first);
  }

  for (std::map<int32, media::PictureBuffer>::iterator it =
           assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    scoped_ptr<SHMBuffer> buffer(available_shm_segments_.back());
    available_shm_segments_.pop_back();
    return buffer.Pass();
  }

  if (available_shm_segments_.size() != num_shm_buffers_) {
    // Either available_shm_segments_ is empty (and we already have some SHM
    // buffers allocated), or the size of available segments is not large
    // enough. In the former case we need to wait for buffers to be returned,
    // in the latter we need to wait for all buffers to be returned to drop
    // them and reallocate with a new size.
    return nullptr;
  }

  if (num_shm_buffers_ != 0) {
    STLDeleteElements(&available_shm_segments_);
    num_shm_buffers_ = 0;
  }

  // Create twice as large buffers as required, to avoid frequent reallocation.
  factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::CreateSHM, weak_factory_.GetWeakPtr(),
                 kNumSharedMemorySegments, min_size * 2));

  // We'll be called again after the shared memory is created.
  return nullptr;
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(size_t count, size_t size) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "CreateSHM. count=" << count << ", size=" << size;

  for (size_t i = 0; i < count; i++) {
    scoped_ptr<base::SharedMemory> shm = factories_->CreateSharedMemory(size);
    if (!shm) {
      LOG(ERROR) << "Failed allocating shared memory of size=" << size;
      NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
      return;
    }

    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(new SHMBuffer(shm.Pass(), size)));
    ++num_shm_buffers_;
  }

  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos. The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest and least likely
  // to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

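// Looks up the timestamp recorded for |bitstream_buffer_id| by
// RecordBufferData(), so the decoded picture can carry the original WebRTC
// timestamp.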
void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging a boolean is enough to know if HW decoding has been used. Also,
  // InitDecode is less likely to return an error so enum is not used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false;
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
    const {
  DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
}

void RTCVideoDecoder::ClearPendingBuffers() {
  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData>>::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end(); ++it) {
    delete[] it->first._buffer;
  }

  pending_buffers_.clear();
}

}  // namespace content