Update UnusedResources lint suppressions.
[chromium-blink-merge.git] / content / renderer / media / rtc_video_encoder.cc
blobcdadae55cd560cc65a60864302399a9afb21df07
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/rtc_video_encoder.h"
7 #include "base/bind.h"
8 #include "base/location.h"
9 #include "base/logging.h"
10 #include "base/memory/scoped_vector.h"
11 #include "base/metrics/histogram.h"
12 #include "base/rand_util.h"
13 #include "base/single_thread_task_runner.h"
14 #include "base/synchronization/waitable_event.h"
15 #include "base/thread_task_runner_handle.h"
16 #include "media/base/bitstream_buffer.h"
17 #include "media/base/video_frame.h"
18 #include "media/base/video_util.h"
19 #include "media/filters/h264_parser.h"
20 #include "media/renderers/gpu_video_accelerator_factories.h"
21 #include "media/video/video_encode_accelerator.h"
22 #include "third_party/libyuv/include/libyuv.h"
23 #include "third_party/webrtc/system_wrappers/interface/tick_util.h"
// Logs the VEA error code and forwards it to Impl::NotifyError() in one
// statement; wrapped in do/while(0) so it behaves like a single statement
// inside if/else without braces.
#define NOTIFY_ERROR(x)                            \
  do {                                             \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x);                                \
  } while (0)
31 namespace content {
33 namespace {
35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
36 // media::VideoCodecProfile.
37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
38 webrtc::VideoCodecType type, const webrtc::VideoCodec* codec_settings) {
39 DCHECK_EQ(type, codec_settings->codecType);
40 switch (type) {
41 case webrtc::kVideoCodecVP8:
42 return media::VP8PROFILE_ANY;
43 case webrtc::kVideoCodecH264: {
44 switch (codec_settings->codecSpecific.H264.profile) {
45 case webrtc::kProfileBase:
46 return media::H264PROFILE_BASELINE;
47 case webrtc::kProfileMain:
48 return media::H264PROFILE_MAIN;
51 default:
52 NOTREACHED() << "Unrecognized video codec type";
53 return media::VIDEO_CODEC_PROFILE_UNKNOWN;
57 // Populates struct webrtc::RTPFragmentationHeader for H264 codec.
58 // Each entry specifies the offset and length (excluding start code) of a NALU.
59 // Returns true if successful.
60 bool GetRTPFragmentationHeaderH264(webrtc::RTPFragmentationHeader* header,
61 const uint8_t* data, uint32_t length) {
62 media::H264Parser parser;
63 parser.SetStream(data, length);
65 std::vector<media::H264NALU> nalu_vector;
66 while (true) {
67 media::H264NALU nalu;
68 const media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
69 if (result == media::H264Parser::kOk) {
70 nalu_vector.push_back(nalu);
71 } else if (result == media::H264Parser::kEOStream) {
72 break;
73 } else {
74 DLOG(ERROR) << "Unexpected H264 parser result";
75 return false;
79 header->VerifyAndAllocateFragmentationHeader(nalu_vector.size());
80 for (size_t i = 0; i < nalu_vector.size(); ++i) {
81 header->fragmentationOffset[i] = nalu_vector[i].data - data;
82 header->fragmentationLength[i] = nalu_vector[i].size;
83 header->fragmentationPlType[i] = 0;
84 header->fragmentationTimeDiff[i] = 0;
86 return true;
89 } // namespace
91 // This private class of RTCVideoEncoder does the actual work of communicating
92 // with a media::VideoEncodeAccelerator for handling video encoding. It can
93 // be created on any thread, but should subsequently be posted to (and Destroy()
94 // called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
95 // thread on which the instance was constructed.
97 // This class separates state related to the thread that RTCVideoEncoder
98 // operates on (presently the libjingle worker thread) from the thread that
99 // |gpu_factories_| provides for accelerator operations (presently the media
100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
102 class RTCVideoEncoder::Impl
103 : public media::VideoEncodeAccelerator::Client,
104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
105 public:
106 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
107 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);
109 // Create the VEA and call Initialize() on it. Called once per instantiation,
110 // and then the instance is bound forevermore to whichever thread made the
111 // call.
112 // RTCVideoEncoder expects to be able to call this function synchronously from
113 // its own thread, hence the |async_waiter| and |async_retval| arguments.
114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
115 uint32 bitrate,
116 media::VideoCodecProfile profile,
117 base::WaitableEvent* async_waiter,
118 int32_t* async_retval);
119 // Enqueue a frame from WebRTC for encoding.
120 // RTCVideoEncoder expects to be able to call this function synchronously from
121 // its own thread, hence the |async_waiter| and |async_retval| arguments.
122 void Enqueue(const webrtc::VideoFrame* input_frame,
123 bool force_keyframe,
124 base::WaitableEvent* async_waiter,
125 int32_t* async_retval);
127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
129 // the buffer is returned to Impl by its index using this function.
130 void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);
132 // Request encoding parameter change for the underlying encoder.
133 void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
135 // Destroy this Impl's encoder. The destructor is not explicitly called, as
136 // Impl is a base::RefCountedThreadSafe.
137 void Destroy();
139 // media::VideoEncodeAccelerator::Client implementation.
140 void RequireBitstreamBuffers(unsigned int input_count,
141 const gfx::Size& input_coded_size,
142 size_t output_buffer_size) override;
143 void BitstreamBufferReady(int32 bitstream_buffer_id,
144 size_t payload_size,
145 bool key_frame) override;
146 void NotifyError(media::VideoEncodeAccelerator::Error error) override;
148 private:
149 friend class base::RefCountedThreadSafe<Impl>;
151 enum {
152 kInputBufferExtraCount = 1, // The number of input buffers allocated, more
153 // than what is requested by
154 // VEA::RequireBitstreamBuffers().
155 kOutputBufferCount = 3,
158 ~Impl() override;
160 // Perform encoding on an input frame from the input queue.
161 void EncodeOneFrame();
163 // Notify that an input frame is finished for encoding. |index| is the index
164 // of the completed frame in |input_buffers_|.
165 void EncodeFrameFinished(int index);
167 // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
168 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
169 void SignalAsyncWaiter(int32_t retval);
171 base::ThreadChecker thread_checker_;
173 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
174 // notifications.
175 const base::WeakPtr<RTCVideoEncoder> weak_encoder_;
177 // The message loop on which to post callbacks to |weak_encoder_|.
178 const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_;
180 // Factory for creating VEAs, shared memory buffers, etc.
181 const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;
183 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
184 // Do this by waiting on the |async_waiter_| and returning the return value in
185 // |async_retval_| when initialization completes, encoding completes, or
186 // an error occurs.
187 base::WaitableEvent* async_waiter_;
188 int32_t* async_retval_;
190 // The underlying VEA to perform encoding on.
191 scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;
193 // Next input frame. Since there is at most one next frame, a single-element
194 // queue is sufficient.
195 const webrtc::VideoFrame* input_next_frame_;
197 // Whether to encode a keyframe next.
198 bool input_next_frame_keyframe_;
200 // Frame sizes.
201 gfx::Size input_frame_coded_size_;
202 gfx::Size input_visible_size_;
204 // Shared memory buffers for input/output with the VEA.
205 ScopedVector<base::SharedMemory> input_buffers_;
206 ScopedVector<base::SharedMemory> output_buffers_;
208 // Input buffers ready to be filled with input from Encode(). As a LIFO since
209 // we don't care about ordering.
210 std::vector<int> input_buffers_free_;
212 // The number of output buffers ready to be filled with output from the
213 // encoder.
214 int output_buffers_free_count_;
216 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
217 uint16 picture_id_;
219 DISALLOW_COPY_AND_ASSIGN(Impl);
222 RTCVideoEncoder::Impl::Impl(
223 const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
224 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
225 : weak_encoder_(weak_encoder),
226 encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()),
227 gpu_factories_(gpu_factories),
228 async_waiter_(NULL),
229 async_retval_(NULL),
230 input_next_frame_(NULL),
231 input_next_frame_keyframe_(false),
232 output_buffers_free_count_(0) {
233 thread_checker_.DetachFromThread();
234 // Picture ID should start on a random number.
235 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
238 void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
239 const gfx::Size& input_visible_size,
240 uint32 bitrate,
241 media::VideoCodecProfile profile,
242 base::WaitableEvent* async_waiter,
243 int32_t* async_retval) {
244 DVLOG(3) << "Impl::CreateAndInitializeVEA()";
245 DCHECK(thread_checker_.CalledOnValidThread());
247 RegisterAsyncWaiter(async_waiter, async_retval);
249 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
250 if (bitrate > kuint32max / 1000) {
251 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
252 return;
255 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
256 if (!video_encoder_) {
257 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
258 return;
260 input_visible_size_ = input_visible_size;
261 if (!video_encoder_->Initialize(media::VideoFrame::I420,
262 input_visible_size_,
263 profile,
264 bitrate * 1000,
265 this)) {
266 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
267 return;
271 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame,
272 bool force_keyframe,
273 base::WaitableEvent* async_waiter,
274 int32_t* async_retval) {
275 DVLOG(3) << "Impl::Enqueue()";
276 DCHECK(thread_checker_.CalledOnValidThread());
277 DCHECK(!input_next_frame_);
279 RegisterAsyncWaiter(async_waiter, async_retval);
280 // If there are no free input and output buffers, drop the frame to avoid a
281 // deadlock. If there is a free input buffer, EncodeOneFrame will run and
282 // unblock Encode(). If there are no free input buffers but there is a free
283 // output buffer, EncodeFrameFinished will be called later to unblock
284 // Encode().
286 // The caller of Encode() holds a webrtc lock. The deadlock happens when:
287 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
288 // (2) There are no free input buffers and they cannot be freed because
289 // the encoder has no output buffers.
290 // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
291 // on libjingle worker thread to be run. But the worker thread is waiting
292 // for the same webrtc lock held by the caller of Encode().
294 // Dropping a frame is fine. The encoder has been filled with all input
295 // buffers. Returning an error in Encode() is not fatal and WebRTC will just
296 // continue. If this is a key frame, WebRTC will request a key frame again.
297 // Besides, webrtc will drop a frame if Encode() blocks too long.
298 if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
299 DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
300 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
301 return;
303 input_next_frame_ = input_frame;
304 input_next_frame_keyframe_ = force_keyframe;
306 if (!input_buffers_free_.empty())
307 EncodeOneFrame();
310 void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
311 int32 bitstream_buffer_id) {
312 DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
313 "bitstream_buffer_id=" << bitstream_buffer_id;
314 DCHECK(thread_checker_.CalledOnValidThread());
315 if (video_encoder_) {
316 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
317 bitstream_buffer_id,
318 output_buffers_[bitstream_buffer_id]->handle(),
319 output_buffers_[bitstream_buffer_id]->mapped_size()));
320 output_buffers_free_count_++;
324 void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
325 uint32 framerate) {
326 DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
327 << ", framerate=" << framerate;
328 DCHECK(thread_checker_.CalledOnValidThread());
330 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
331 if (bitrate > kuint32max / 1000) {
332 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
333 return;
336 if (video_encoder_)
337 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
340 void RTCVideoEncoder::Impl::Destroy() {
341 DVLOG(3) << "Impl::Destroy()";
342 DCHECK(thread_checker_.CalledOnValidThread());
343 video_encoder_.reset();
346 void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
347 unsigned int input_count,
348 const gfx::Size& input_coded_size,
349 size_t output_buffer_size) {
350 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
351 << ", input_coded_size=" << input_coded_size.ToString()
352 << ", output_buffer_size=" << output_buffer_size;
353 DCHECK(thread_checker_.CalledOnValidThread());
355 if (!video_encoder_)
356 return;
358 input_frame_coded_size_ = input_coded_size;
360 for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
361 scoped_ptr<base::SharedMemory> shm =
362 gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
363 media::VideoFrame::I420, input_coded_size));
364 if (!shm) {
365 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
366 "failed to create input buffer " << i;
367 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
368 return;
370 input_buffers_.push_back(shm.release());
371 input_buffers_free_.push_back(i);
374 for (int i = 0; i < kOutputBufferCount; ++i) {
375 scoped_ptr<base::SharedMemory> shm =
376 gpu_factories_->CreateSharedMemory(output_buffer_size);
377 if (!shm) {
378 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
379 "failed to create output buffer " << i;
380 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
381 return;
383 output_buffers_.push_back(shm.release());
386 // Immediately provide all output buffers to the VEA.
387 for (size_t i = 0; i < output_buffers_.size(); ++i) {
388 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
389 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
390 output_buffers_free_count_++;
392 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
395 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
396 size_t payload_size,
397 bool key_frame) {
398 DVLOG(3) << "Impl::BitstreamBufferReady(): "
399 "bitstream_buffer_id=" << bitstream_buffer_id
400 << ", payload_size=" << payload_size
401 << ", key_frame=" << key_frame;
402 DCHECK(thread_checker_.CalledOnValidThread());
404 if (bitstream_buffer_id < 0 ||
405 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
406 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
407 << bitstream_buffer_id;
408 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
409 return;
411 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
412 if (payload_size > output_buffer->mapped_size()) {
413 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
414 << payload_size;
415 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
416 return;
418 output_buffers_free_count_--;
420 // Use webrtc timestamps to ensure correct RTP sender behavior.
421 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
422 const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();
424 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
425 int64 capture_time_ms = capture_time_us / 1000;
426 uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_us * 90 / 1000);
428 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
429 reinterpret_cast<uint8_t*>(output_buffer->memory()),
430 payload_size,
431 output_buffer->mapped_size()));
432 image->_encodedWidth = input_visible_size_.width();
433 image->_encodedHeight = input_visible_size_.height();
434 image->_timeStamp = rtp_timestamp;
435 image->capture_time_ms_ = capture_time_ms;
436 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
437 image->_completeFrame = true;
439 encoder_task_runner_->PostTask(
440 FROM_HERE,
441 base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_,
442 base::Passed(&image), bitstream_buffer_id, picture_id_));
443 // Picture ID must wrap after reaching the maximum.
444 picture_id_ = (picture_id_ + 1) & 0x7FFF;
447 void RTCVideoEncoder::Impl::NotifyError(
448 media::VideoEncodeAccelerator::Error error) {
449 DVLOG(3) << "Impl::NotifyError(): error=" << error;
450 DCHECK(thread_checker_.CalledOnValidThread());
451 int32_t retval;
452 switch (error) {
453 case media::VideoEncodeAccelerator::kInvalidArgumentError:
454 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
455 break;
456 default:
457 retval = WEBRTC_VIDEO_CODEC_ERROR;
460 video_encoder_.reset();
462 if (async_waiter_) {
463 SignalAsyncWaiter(retval);
464 } else {
465 encoder_task_runner_->PostTask(
466 FROM_HERE,
467 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
471 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
473 void RTCVideoEncoder::Impl::EncodeOneFrame() {
474 DVLOG(3) << "Impl::EncodeOneFrame()";
475 DCHECK(thread_checker_.CalledOnValidThread());
476 DCHECK(input_next_frame_);
477 DCHECK(!input_buffers_free_.empty());
479 // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
480 // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
481 // Encode() gets destroyed early. Handle this by resetting our
482 // input_next_frame_* state before we hand off the VideoFrame to the VEA.
483 const webrtc::VideoFrame* next_frame = input_next_frame_;
484 bool next_frame_keyframe = input_next_frame_keyframe_;
485 input_next_frame_ = NULL;
486 input_next_frame_keyframe_ = false;
488 if (!video_encoder_) {
489 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
490 return;
493 const int index = input_buffers_free_.back();
494 base::SharedMemory* input_buffer = input_buffers_[index];
495 scoped_refptr<media::VideoFrame> frame =
496 media::VideoFrame::WrapExternalSharedMemory(
497 media::VideoFrame::I420,
498 input_frame_coded_size_,
499 gfx::Rect(input_visible_size_),
500 input_visible_size_,
501 reinterpret_cast<uint8*>(input_buffer->memory()),
502 input_buffer->mapped_size(),
503 input_buffer->handle(),
505 base::TimeDelta());
506 frame->AddDestructionObserver(
507 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
508 if (!frame.get()) {
509 DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
510 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
511 return;
514 // Do a strided copy of the input frame to match the input requirements for
515 // the encoder.
516 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
517 if (libyuv::I420Copy(next_frame->buffer(webrtc::kYPlane),
518 next_frame->stride(webrtc::kYPlane),
519 next_frame->buffer(webrtc::kUPlane),
520 next_frame->stride(webrtc::kUPlane),
521 next_frame->buffer(webrtc::kVPlane),
522 next_frame->stride(webrtc::kVPlane),
523 frame->data(media::VideoFrame::kYPlane),
524 frame->stride(media::VideoFrame::kYPlane),
525 frame->data(media::VideoFrame::kUPlane),
526 frame->stride(media::VideoFrame::kUPlane),
527 frame->data(media::VideoFrame::kVPlane),
528 frame->stride(media::VideoFrame::kVPlane),
529 next_frame->width(),
530 next_frame->height())) {
531 DLOG(ERROR) << "Failed to copy buffer";
532 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
533 return;
536 video_encoder_->Encode(frame, next_frame_keyframe);
537 input_buffers_free_.pop_back();
538 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
541 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
542 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
543 DCHECK(thread_checker_.CalledOnValidThread());
544 DCHECK_GE(index, 0);
545 DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
546 input_buffers_free_.push_back(index);
547 if (input_next_frame_)
548 EncodeOneFrame();
551 void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
552 int32_t* retval) {
553 DCHECK(thread_checker_.CalledOnValidThread());
554 DCHECK(!async_waiter_);
555 DCHECK(!async_retval_);
556 async_waiter_ = waiter;
557 async_retval_ = retval;
560 void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
561 DCHECK(thread_checker_.CalledOnValidThread());
562 *async_retval_ = retval;
563 async_waiter_->Signal();
564 async_retval_ = NULL;
565 async_waiter_ = NULL;
568 #undef NOTIFY_ERROR
570 ////////////////////////////////////////////////////////////////////////////////
572 // RTCVideoEncoder
574 ////////////////////////////////////////////////////////////////////////////////
576 RTCVideoEncoder::RTCVideoEncoder(
577 webrtc::VideoCodecType type,
578 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
579 : video_codec_type_(type),
580 gpu_factories_(gpu_factories),
581 encoded_image_callback_(NULL),
582 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
583 weak_factory_(this) {
584 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type;
587 RTCVideoEncoder::~RTCVideoEncoder() {
588 DVLOG(3) << "~RTCVideoEncoder";
589 DCHECK(thread_checker_.CalledOnValidThread());
590 Release();
591 DCHECK(!impl_.get());
594 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
595 int32_t number_of_cores,
596 size_t max_payload_size) {
597 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
598 << ", width=" << codec_settings->width
599 << ", height=" << codec_settings->height
600 << ", startBitrate=" << codec_settings->startBitrate;
601 DCHECK(thread_checker_.CalledOnValidThread());
602 DCHECK(!impl_.get());
604 media::VideoCodecProfile profile = WebRTCVideoCodecToVideoCodecProfile(
605 video_codec_type_, codec_settings);
607 weak_factory_.InvalidateWeakPtrs();
608 impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
609 base::WaitableEvent initialization_waiter(true, false);
610 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
611 gpu_factories_->GetTaskRunner()->PostTask(
612 FROM_HERE,
613 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
614 impl_,
615 gfx::Size(codec_settings->width, codec_settings->height),
616 codec_settings->startBitrate,
617 profile,
618 &initialization_waiter,
619 &initialization_retval));
621 // webrtc::VideoEncoder expects this call to be synchronous.
622 initialization_waiter.Wait();
623 RecordInitEncodeUMA(initialization_retval, profile);
624 return initialization_retval;
627 int32_t RTCVideoEncoder::Encode(
628 const webrtc::VideoFrame& input_image,
629 const webrtc::CodecSpecificInfo* codec_specific_info,
630 const std::vector<webrtc::VideoFrameType>* frame_types) {
631 DVLOG(3) << "Encode()";
632 if (!impl_.get()) {
633 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
634 return impl_status_;
637 bool want_key_frame = frame_types && frame_types->size() &&
638 frame_types->front() == webrtc::kKeyFrame;
639 base::WaitableEvent encode_waiter(true, false);
640 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
641 gpu_factories_->GetTaskRunner()->PostTask(
642 FROM_HERE,
643 base::Bind(&RTCVideoEncoder::Impl::Enqueue,
644 impl_,
645 &input_image,
646 want_key_frame,
647 &encode_waiter,
648 &encode_retval));
650 // webrtc::VideoEncoder expects this call to be synchronous.
651 encode_waiter.Wait();
652 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
653 return encode_retval;
656 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
657 webrtc::EncodedImageCallback* callback) {
658 DVLOG(3) << "RegisterEncodeCompleteCallback()";
659 DCHECK(thread_checker_.CalledOnValidThread());
660 if (!impl_.get()) {
661 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
662 return impl_status_;
665 encoded_image_callback_ = callback;
666 return WEBRTC_VIDEO_CODEC_OK;
669 int32_t RTCVideoEncoder::Release() {
670 DVLOG(3) << "Release()";
671 DCHECK(thread_checker_.CalledOnValidThread());
673 if (impl_.get()) {
674 gpu_factories_->GetTaskRunner()->PostTask(
675 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
676 impl_ = NULL;
677 weak_factory_.InvalidateWeakPtrs();
678 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
680 return WEBRTC_VIDEO_CODEC_OK;
683 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss,
684 int64_t rtt) {
685 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
686 << ", rtt=" << rtt;
687 // Ignored.
688 return WEBRTC_VIDEO_CODEC_OK;
691 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
692 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
693 << ", frame_rate=" << frame_rate;
694 if (!impl_.get()) {
695 DVLOG(3) << "SetRates(): returning " << impl_status_;
696 return impl_status_;
699 gpu_factories_->GetTaskRunner()->PostTask(
700 FROM_HERE,
701 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
702 impl_,
703 new_bit_rate,
704 frame_rate));
705 return WEBRTC_VIDEO_CODEC_OK;
708 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
709 int32 bitstream_buffer_id,
710 uint16 picture_id) {
711 DCHECK(thread_checker_.CalledOnValidThread());
712 DVLOG(3) << "ReturnEncodedImage(): "
713 << "bitstream_buffer_id=" << bitstream_buffer_id
714 << ", picture_id=" << picture_id;
716 if (!encoded_image_callback_)
717 return;
719 webrtc::RTPFragmentationHeader header;
720 memset(&header, 0, sizeof(header));
721 switch (video_codec_type_) {
722 case webrtc::kVideoCodecVP8:
723 // Generate a header describing a single fragment.
724 header.VerifyAndAllocateFragmentationHeader(1);
725 header.fragmentationOffset[0] = 0;
726 header.fragmentationLength[0] = image->_length;
727 header.fragmentationPlType[0] = 0;
728 header.fragmentationTimeDiff[0] = 0;
729 break;
730 case webrtc::kVideoCodecH264:
731 if (!GetRTPFragmentationHeaderH264(
732 &header, image->_buffer, image->_length)) {
733 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264";
734 NotifyError(WEBRTC_VIDEO_CODEC_ERROR);
735 return;
737 break;
738 default:
739 NOTREACHED() << "Invalid video codec type";
740 return;
743 webrtc::CodecSpecificInfo info;
744 memset(&info, 0, sizeof(info));
745 info.codecType = video_codec_type_;
746 if (video_codec_type_ == webrtc::kVideoCodecVP8) {
747 info.codecSpecific.VP8.pictureId = picture_id;
748 info.codecSpecific.VP8.tl0PicIdx = -1;
749 info.codecSpecific.VP8.keyIdx = -1;
752 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
753 if (retval < 0) {
754 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
755 << retval;
758 // The call through webrtc::EncodedImageCallback is synchronous, so we can
759 // immediately recycle the output buffer back to the Impl.
760 gpu_factories_->GetTaskRunner()->PostTask(
761 FROM_HERE,
762 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
763 impl_,
764 bitstream_buffer_id));
767 void RTCVideoEncoder::NotifyError(int32_t error) {
768 DCHECK(thread_checker_.CalledOnValidThread());
769 DVLOG(1) << "NotifyError(): error=" << error;
771 impl_status_ = error;
772 gpu_factories_->GetTaskRunner()->PostTask(
773 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
774 impl_ = NULL;
777 void RTCVideoEncoder::RecordInitEncodeUMA(
778 int32_t init_retval, media::VideoCodecProfile profile) {
779 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
780 init_retval == WEBRTC_VIDEO_CODEC_OK);
781 if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
782 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
783 profile,
784 media::VIDEO_CODEC_PROFILE_MAX + 1);
788 } // namespace content