// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/safe_numerics.h"
#include "base/stl_util.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "media/base/bind_to_current_loop.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {
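
// Bitstream buffer ids live in the 30-bit range [0, ID_LAST] and wrap around;
// IsBufferAfterReset() below defines how two ids are ordered across the wrap.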
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate. Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for shared
// memory: 10 seconds of video at 30 fps (300 frames).
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : factories_(factories),
      vda_task_runner_(factories->GetTaskRunner()),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  DCHECK(!vda_task_runner_->BelongsToCurrentThread());
  weak_this_ = weak_factory_.GetWeakPtr();

  base::WaitableEvent message_loop_async_waiter(false, false);
  // Waiting here is safe. The media thread is stopped in the child thread and
  // the child thread is blocked when VideoDecoderFactory::CreateVideoDecoder
  // runs.
  vda_task_runner_->PostTask(FROM_HERE,
                             base::Bind(&RTCVideoDecoder::Initialize,
                                        base::Unretained(this),
                                        &message_loop_async_waiter));
  message_loop_async_waiter.Wait();
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  // Destroy the VDA and remove |this| as a destruction observer if this is
  // the VDA thread.
  if (vda_task_runner_->BelongsToCurrentThread()) {
    base::MessageLoop::current()->RemoveDestructionObserver(this);
    DestroyVDA();
  } else {
    // VDA should have been destroyed in WillDestroyCurrentMessageLoop.
    DCHECK(!vda_);
  }

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  decoder.reset(new RTCVideoDecoder(factories));
  decoder->vda_ =
      factories->CreateVideoDecodeAccelerator(profile, decoder.get()).Pass();
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    vda_task_runner_->PostTask(FROM_HERE,
                               base::Bind(&RTCVideoDecoder::CreateSHM,
                                          weak_this_,
                                          kMaxInFlightDecodes,
                                          kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken
    // frames. Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally. Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do
  // that here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;
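  // For example, an id of ID_LAST (0x3FFFFFFF) wraps to
  // (ID_LAST + 1) & ID_LAST == 0, so ids always stay within [0, ID_LAST].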

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  vda_task_runner_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
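  // |reset_bitstream_buffer_id_| is now the id just before
  // |next_bitstream_buffer_id_| in wrap order; buffers up to and including it
  // are dropped, and the next id counts as the first buffer after the reset.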
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    vda_task_runner_->PostTask(
        FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::NotifyInitializeDone() {
  DVLOG(2) << "NotifyInitializeDone";
  NOTREACHED();
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  // Discards the sync point returned here since PictureReady will imply that
  // the produce has already happened, and the texture is ready for use.
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCHECK(vda_task_runner_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  std::set<int32>::iterator at_display_it =
      picture_buffers_at_display_.find(id);

  if (at_display_it == picture_buffers_at_display_.end()) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
  } else {
    // Texture in display. Postpone deletion until after it's returned to us.
    bool inserted = dismissed_picture_buffers_
        .insert(std::make_pair(id, buffer_to_dismiss)).second;
    DCHECK(inserted);
  }
}

void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCHECK(vda_task_runner_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  bool inserted =
      picture_buffers_at_display_.insert(picture.picture_buffer_id()).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  gfx::Size natural_size(width, height);
  DCHECK(decoder_texture_target_);
  // Convert the 90 kHz RTP timestamp to a base::TimeDelta (whose internal
  // value is in microseconds).
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90);
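  // For example, a timestamp of 90000 (one second at 90 kHz) becomes
  // 90000 * 1000 / 90 = 1,000,000 microseconds, i.e. a TimeDelta of 1 second.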
  return media::VideoFrame::WrapNativeTexture(
      make_scoped_ptr(new media::VideoFrame::MailboxHolder(
          pb.texture_mailbox(),
          0,  // sync_point
          media::BindToCurrentLoop(
              base::Bind(&RTCVideoDecoder::ReusePictureBuffer,
                         weak_this_,
                         picture.picture_buffer_id())))),
      decoder_texture_target_,
      pb.size(),
      visible_rect,
      natural_size,
      timestamp_ms,
      base::Bind(&media::GpuVideoAcceleratorFactories::ReadPixels,
                 factories_,
                 pb.texture_id(),
                 natural_size),
      base::Closure());
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCHECK(vda_task_runner_->BelongsToCurrentThread());

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();

  base::AutoLock auto_lock(lock_);
  state_ = INITIALIZED;

  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::WillDestroyCurrentMessageLoop() {
  DVLOG(2) << "WillDestroyCurrentMessageLoop";
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  factories_->Abort();
  weak_factory_.InvalidateWeakPtrs();
  DestroyVDA();
}

void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) {
  DVLOG(2) << "Initialize";
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  base::MessageLoop::current()->AddDestructionObserver(this);
  waiter->Signal();
}

void RTCVideoDecoder::RequestBufferDecode() {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop the buffers before Reset or Release is called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}
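
// A worked example of the wraparound comparison below: with
// id_reset == ID_LAST (0x3FFFFFFF) and id_buffer == 0, diff starts at
// -ID_LAST, has ID_LAST + 1 added, and ends up as 1, which is < ID_HALF, so
// buffer 0 is correctly treated as coming after the reset point.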
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Queued too many buffers. Something has gone wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id,
                                         uint32 sync_point) {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  if (!vda_)
    return;

  CHECK(!picture_buffers_at_display_.empty());

  size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id);
  DCHECK(num_erased);

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture_buffer_id);

  if (it == assigned_picture_buffers_.end()) {
    // This picture was dismissed while in display, so we postponed deletion.
    it = dismissed_picture_buffers_.find(picture_buffer_id);
    DCHECK(it != dismissed_picture_buffers_.end());
    factories_->DeleteTexture(it->second.texture_id());
    dismissed_picture_buffers_.erase(it);
    return;
  }

  factories_->WaitSyncPoint(sync_point);

  vda_->ReusePictureBuffer(picture_buffer_id);
}

void RTCVideoDecoder::DestroyTextures() {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  std::map<int32, media::PictureBuffer>::iterator it;

  for (it = assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();

  for (it = dismissed_picture_buffers_.begin();
       it != dismissed_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  dismissed_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
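  // VideoDecodeAccelerator::Destroy() is expected to delete the VDA itself,
  // so ownership is released from the scoped_ptr rather than letting it
  // delete the object.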
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    vda_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, 1, min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCHECK(vda_task_runner_->BelongsToCurrentThread());
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos. The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that entry is the oldest and the
  // least likely to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp,
                                    uint32_t* width,
                                    uint32_t* height,
                                    size_t* size) {
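  // Note: |size| is never written here; the lookup below only fills in the
  // timestamp and dimensions, and CreateVideoFrame() does not use |size|.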
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    *width = it->width;
    *height = it->height;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging a boolean is enough to know whether HW decoding has been used.
  // Also, InitDecode is less likely to return an error, so an enum is not
  // used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK);
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

}  // namespace content