Roll src/third_party/WebKit d10c917:a1123a1 (svn 198729:198730)
[chromium-blink-merge.git] / content / renderer / media / rtc_video_encoder.cc
blob995766aaef4e25b2ec6adf8fa439c8605429b9bf
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/rtc_video_encoder.h"
7 #include "base/bind.h"
8 #include "base/location.h"
9 #include "base/logging.h"
10 #include "base/memory/scoped_vector.h"
11 #include "base/metrics/histogram.h"
12 #include "base/rand_util.h"
13 #include "base/single_thread_task_runner.h"
14 #include "base/synchronization/waitable_event.h"
15 #include "base/thread_task_runner_handle.h"
16 #include "media/base/bind_to_current_loop.h"
17 #include "media/base/bitstream_buffer.h"
18 #include "media/base/video_frame.h"
19 #include "media/base/video_util.h"
20 #include "media/filters/h264_parser.h"
21 #include "media/renderers/gpu_video_accelerator_factories.h"
22 #include "media/video/video_encode_accelerator.h"
23 #include "third_party/libyuv/include/libyuv.h"
24 #include "third_party/webrtc/system_wrappers/interface/tick_util.h"
// Log the VEA error and forward it to Impl::NotifyError(), which tears down
// the encoder and unblocks/notifies the WebRTC side.
#define NOTIFY_ERROR(x)                              \
  do {                                               \
    DLOG(ERROR) << "calling NotifyError(): " << x;   \
    NotifyError(x);                                  \
  } while (0)
32 namespace content {
34 namespace {
36 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
37 // media::VideoCodecProfile.
38 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
39 webrtc::VideoCodecType type, const webrtc::VideoCodec* codec_settings) {
40 DCHECK_EQ(type, codec_settings->codecType);
41 switch (type) {
42 case webrtc::kVideoCodecVP8:
43 return media::VP8PROFILE_ANY;
44 case webrtc::kVideoCodecH264: {
45 switch (codec_settings->codecSpecific.H264.profile) {
46 case webrtc::kProfileBase:
47 return media::H264PROFILE_BASELINE;
48 case webrtc::kProfileMain:
49 return media::H264PROFILE_MAIN;
52 default:
53 NOTREACHED() << "Unrecognized video codec type";
54 return media::VIDEO_CODEC_PROFILE_UNKNOWN;
58 // Populates struct webrtc::RTPFragmentationHeader for H264 codec.
59 // Each entry specifies the offset and length (excluding start code) of a NALU.
60 // Returns true if successful.
61 bool GetRTPFragmentationHeaderH264(webrtc::RTPFragmentationHeader* header,
62 const uint8_t* data, uint32_t length) {
63 media::H264Parser parser;
64 parser.SetStream(data, length);
66 std::vector<media::H264NALU> nalu_vector;
67 while (true) {
68 media::H264NALU nalu;
69 const media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
70 if (result == media::H264Parser::kOk) {
71 nalu_vector.push_back(nalu);
72 } else if (result == media::H264Parser::kEOStream) {
73 break;
74 } else {
75 DLOG(ERROR) << "Unexpected H264 parser result";
76 return false;
80 header->VerifyAndAllocateFragmentationHeader(nalu_vector.size());
81 for (size_t i = 0; i < nalu_vector.size(); ++i) {
82 header->fragmentationOffset[i] = nalu_vector[i].data - data;
83 header->fragmentationLength[i] = nalu_vector[i].size;
84 header->fragmentationPlType[i] = 0;
85 header->fragmentationTimeDiff[i] = 0;
87 return true;
90 } // namespace
92 // This private class of RTCVideoEncoder does the actual work of communicating
93 // with a media::VideoEncodeAccelerator for handling video encoding. It can
94 // be created on any thread, but should subsequently be posted to (and Destroy()
95 // called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
96 // thread on which the instance was constructed.
98 // This class separates state related to the thread that RTCVideoEncoder
99 // operates on (presently the libjingle worker thread) from the thread that
100 // |gpu_factories_| provides for accelerator operations (presently the media
101 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
102 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
103 class RTCVideoEncoder::Impl
104 : public media::VideoEncodeAccelerator::Client,
105 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
106 public:
107 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
108 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);
110 // Create the VEA and call Initialize() on it. Called once per instantiation,
111 // and then the instance is bound forevermore to whichever thread made the
112 // call.
113 // RTCVideoEncoder expects to be able to call this function synchronously from
114 // its own thread, hence the |async_waiter| and |async_retval| arguments.
115 void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
116 uint32 bitrate,
117 media::VideoCodecProfile profile,
118 base::WaitableEvent* async_waiter,
119 int32_t* async_retval);
120 // Enqueue a frame from WebRTC for encoding.
121 // RTCVideoEncoder expects to be able to call this function synchronously from
122 // its own thread, hence the |async_waiter| and |async_retval| arguments.
123 void Enqueue(const webrtc::VideoFrame* input_frame,
124 bool force_keyframe,
125 base::WaitableEvent* async_waiter,
126 int32_t* async_retval);
128 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
129 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
130 // the buffer is returned to Impl by its index using this function.
131 void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);
133 // Request encoding parameter change for the underlying encoder.
134 void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
136 // Destroy this Impl's encoder. The destructor is not explicitly called, as
137 // Impl is a base::RefCountedThreadSafe.
138 void Destroy();
140 // media::VideoEncodeAccelerator::Client implementation.
141 void RequireBitstreamBuffers(unsigned int input_count,
142 const gfx::Size& input_coded_size,
143 size_t output_buffer_size) override;
144 void BitstreamBufferReady(int32 bitstream_buffer_id,
145 size_t payload_size,
146 bool key_frame) override;
147 void NotifyError(media::VideoEncodeAccelerator::Error error) override;
149 private:
150 friend class base::RefCountedThreadSafe<Impl>;
152 enum {
153 kInputBufferExtraCount = 1, // The number of input buffers allocated, more
154 // than what is requested by
155 // VEA::RequireBitstreamBuffers().
156 kOutputBufferCount = 3,
159 ~Impl() override;
161 // Perform encoding on an input frame from the input queue.
162 void EncodeOneFrame();
164 // Notify that an input frame is finished for encoding. |index| is the index
165 // of the completed frame in |input_buffers_|.
166 void EncodeFrameFinished(int index);
168 // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
169 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
170 void SignalAsyncWaiter(int32_t retval);
172 base::ThreadChecker thread_checker_;
174 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
175 // notifications.
176 const base::WeakPtr<RTCVideoEncoder> weak_encoder_;
178 // The message loop on which to post callbacks to |weak_encoder_|.
179 const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_;
181 // Factory for creating VEAs, shared memory buffers, etc.
182 const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;
184 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
185 // Do this by waiting on the |async_waiter_| and returning the return value in
186 // |async_retval_| when initialization completes, encoding completes, or
187 // an error occurs.
188 base::WaitableEvent* async_waiter_;
189 int32_t* async_retval_;
191 // The underlying VEA to perform encoding on.
192 scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;
194 // Next input frame. Since there is at most one next frame, a single-element
195 // queue is sufficient.
196 const webrtc::VideoFrame* input_next_frame_;
198 // Whether to encode a keyframe next.
199 bool input_next_frame_keyframe_;
201 // Frame sizes.
202 gfx::Size input_frame_coded_size_;
203 gfx::Size input_visible_size_;
205 // Shared memory buffers for input/output with the VEA.
206 ScopedVector<base::SharedMemory> input_buffers_;
207 ScopedVector<base::SharedMemory> output_buffers_;
209 // Input buffers ready to be filled with input from Encode(). As a LIFO since
210 // we don't care about ordering.
211 std::vector<int> input_buffers_free_;
213 // The number of output buffers ready to be filled with output from the
214 // encoder.
215 int output_buffers_free_count_;
217 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
218 uint16 picture_id_;
220 DISALLOW_COPY_AND_ASSIGN(Impl);
223 RTCVideoEncoder::Impl::Impl(
224 const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
225 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
226 : weak_encoder_(weak_encoder),
227 encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()),
228 gpu_factories_(gpu_factories),
229 async_waiter_(NULL),
230 async_retval_(NULL),
231 input_next_frame_(NULL),
232 input_next_frame_keyframe_(false),
233 output_buffers_free_count_(0) {
234 thread_checker_.DetachFromThread();
235 // Picture ID should start on a random number.
236 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
239 void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
240 const gfx::Size& input_visible_size,
241 uint32 bitrate,
242 media::VideoCodecProfile profile,
243 base::WaitableEvent* async_waiter,
244 int32_t* async_retval) {
245 DVLOG(3) << "Impl::CreateAndInitializeVEA()";
246 DCHECK(thread_checker_.CalledOnValidThread());
248 RegisterAsyncWaiter(async_waiter, async_retval);
250 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
251 if (bitrate > kuint32max / 1000) {
252 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
253 return;
256 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
257 if (!video_encoder_) {
258 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
259 return;
261 input_visible_size_ = input_visible_size;
262 if (!video_encoder_->Initialize(media::VideoFrame::I420,
263 input_visible_size_,
264 profile,
265 bitrate * 1000,
266 this)) {
267 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
268 return;
272 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame,
273 bool force_keyframe,
274 base::WaitableEvent* async_waiter,
275 int32_t* async_retval) {
276 DVLOG(3) << "Impl::Enqueue()";
277 DCHECK(thread_checker_.CalledOnValidThread());
278 DCHECK(!input_next_frame_);
280 RegisterAsyncWaiter(async_waiter, async_retval);
281 // If there are no free input and output buffers, drop the frame to avoid a
282 // deadlock. If there is a free input buffer, EncodeOneFrame will run and
283 // unblock Encode(). If there are no free input buffers but there is a free
284 // output buffer, EncodeFrameFinished will be called later to unblock
285 // Encode().
287 // The caller of Encode() holds a webrtc lock. The deadlock happens when:
288 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
289 // (2) There are no free input buffers and they cannot be freed because
290 // the encoder has no output buffers.
291 // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
292 // on libjingle worker thread to be run. But the worker thread is waiting
293 // for the same webrtc lock held by the caller of Encode().
295 // Dropping a frame is fine. The encoder has been filled with all input
296 // buffers. Returning an error in Encode() is not fatal and WebRTC will just
297 // continue. If this is a key frame, WebRTC will request a key frame again.
298 // Besides, webrtc will drop a frame if Encode() blocks too long.
299 if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
300 DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
301 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
302 return;
304 input_next_frame_ = input_frame;
305 input_next_frame_keyframe_ = force_keyframe;
307 if (!input_buffers_free_.empty())
308 EncodeOneFrame();
311 void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
312 int32 bitstream_buffer_id) {
313 DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
314 "bitstream_buffer_id=" << bitstream_buffer_id;
315 DCHECK(thread_checker_.CalledOnValidThread());
316 if (video_encoder_) {
317 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
318 bitstream_buffer_id,
319 output_buffers_[bitstream_buffer_id]->handle(),
320 output_buffers_[bitstream_buffer_id]->mapped_size()));
321 output_buffers_free_count_++;
325 void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
326 uint32 framerate) {
327 DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
328 << ", framerate=" << framerate;
329 DCHECK(thread_checker_.CalledOnValidThread());
331 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
332 if (bitrate > kuint32max / 1000) {
333 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
334 return;
337 if (video_encoder_)
338 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
341 void RTCVideoEncoder::Impl::Destroy() {
342 DVLOG(3) << "Impl::Destroy()";
343 DCHECK(thread_checker_.CalledOnValidThread());
344 video_encoder_.reset();
347 void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
348 unsigned int input_count,
349 const gfx::Size& input_coded_size,
350 size_t output_buffer_size) {
351 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
352 << ", input_coded_size=" << input_coded_size.ToString()
353 << ", output_buffer_size=" << output_buffer_size;
354 DCHECK(thread_checker_.CalledOnValidThread());
356 if (!video_encoder_)
357 return;
359 input_frame_coded_size_ = input_coded_size;
361 for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
362 scoped_ptr<base::SharedMemory> shm =
363 gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
364 media::VideoFrame::I420, input_coded_size));
365 if (!shm) {
366 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
367 "failed to create input buffer " << i;
368 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
369 return;
371 input_buffers_.push_back(shm.release());
372 input_buffers_free_.push_back(i);
375 for (int i = 0; i < kOutputBufferCount; ++i) {
376 scoped_ptr<base::SharedMemory> shm =
377 gpu_factories_->CreateSharedMemory(output_buffer_size);
378 if (!shm) {
379 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
380 "failed to create output buffer " << i;
381 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
382 return;
384 output_buffers_.push_back(shm.release());
387 // Immediately provide all output buffers to the VEA.
388 for (size_t i = 0; i < output_buffers_.size(); ++i) {
389 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
390 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
391 output_buffers_free_count_++;
393 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
396 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
397 size_t payload_size,
398 bool key_frame) {
399 DVLOG(3) << "Impl::BitstreamBufferReady(): "
400 "bitstream_buffer_id=" << bitstream_buffer_id
401 << ", payload_size=" << payload_size
402 << ", key_frame=" << key_frame;
403 DCHECK(thread_checker_.CalledOnValidThread());
405 if (bitstream_buffer_id < 0 ||
406 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
407 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
408 << bitstream_buffer_id;
409 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
410 return;
412 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
413 if (payload_size > output_buffer->mapped_size()) {
414 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
415 << payload_size;
416 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
417 return;
419 output_buffers_free_count_--;
421 // Use webrtc timestamps to ensure correct RTP sender behavior.
422 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
423 const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();
425 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
426 const int64 capture_time_ms = capture_time_us / 1000;
427 const uint32_t rtp_timestamp =
428 static_cast<uint32_t>(capture_time_us * 90 / 1000);
430 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
431 reinterpret_cast<uint8_t*>(output_buffer->memory()),
432 payload_size,
433 output_buffer->mapped_size()));
434 image->_encodedWidth = input_visible_size_.width();
435 image->_encodedHeight = input_visible_size_.height();
436 image->_timeStamp = rtp_timestamp;
437 image->capture_time_ms_ = capture_time_ms;
438 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
439 image->_completeFrame = true;
441 encoder_task_runner_->PostTask(
442 FROM_HERE,
443 base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_,
444 base::Passed(&image), bitstream_buffer_id, picture_id_));
445 // Picture ID must wrap after reaching the maximum.
446 picture_id_ = (picture_id_ + 1) & 0x7FFF;
449 void RTCVideoEncoder::Impl::NotifyError(
450 media::VideoEncodeAccelerator::Error error) {
451 DVLOG(3) << "Impl::NotifyError(): error=" << error;
452 DCHECK(thread_checker_.CalledOnValidThread());
453 int32_t retval;
454 switch (error) {
455 case media::VideoEncodeAccelerator::kInvalidArgumentError:
456 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
457 break;
458 default:
459 retval = WEBRTC_VIDEO_CODEC_ERROR;
462 video_encoder_.reset();
464 if (async_waiter_) {
465 SignalAsyncWaiter(retval);
466 } else {
467 encoder_task_runner_->PostTask(
468 FROM_HERE,
469 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
473 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
475 void RTCVideoEncoder::Impl::EncodeOneFrame() {
476 DVLOG(3) << "Impl::EncodeOneFrame()";
477 DCHECK(thread_checker_.CalledOnValidThread());
478 DCHECK(input_next_frame_);
479 DCHECK(!input_buffers_free_.empty());
481 // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
482 // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
483 // Encode() gets destroyed early. Handle this by resetting our
484 // input_next_frame_* state before we hand off the VideoFrame to the VEA.
485 const webrtc::VideoFrame* next_frame = input_next_frame_;
486 const bool next_frame_keyframe = input_next_frame_keyframe_;
487 input_next_frame_ = NULL;
488 input_next_frame_keyframe_ = false;
490 if (!video_encoder_) {
491 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
492 return;
495 const int index = input_buffers_free_.back();
496 scoped_refptr<media::VideoFrame> frame;
497 if (next_frame->native_handle()) {
498 frame = static_cast<media::VideoFrame*>(next_frame->native_handle());
499 } else {
500 base::SharedMemory* input_buffer = input_buffers_[index];
501 frame = media::VideoFrame::WrapExternalSharedMemory(
502 media::VideoFrame::I420,
503 input_frame_coded_size_,
504 gfx::Rect(input_visible_size_),
505 input_visible_size_,
506 reinterpret_cast<uint8*>(input_buffer->memory()),
507 input_buffer->mapped_size(),
508 input_buffer->handle(),
510 base::TimeDelta());
511 if (!frame.get()) {
512 DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
513 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
514 return;
516 // Do a strided copy of the input frame to match the input requirements for
517 // the encoder.
518 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
519 if (libyuv::I420Copy(next_frame->buffer(webrtc::kYPlane),
520 next_frame->stride(webrtc::kYPlane),
521 next_frame->buffer(webrtc::kUPlane),
522 next_frame->stride(webrtc::kUPlane),
523 next_frame->buffer(webrtc::kVPlane),
524 next_frame->stride(webrtc::kVPlane),
525 frame->data(media::VideoFrame::kYPlane),
526 frame->stride(media::VideoFrame::kYPlane),
527 frame->data(media::VideoFrame::kUPlane),
528 frame->stride(media::VideoFrame::kUPlane),
529 frame->data(media::VideoFrame::kVPlane),
530 frame->stride(media::VideoFrame::kVPlane),
531 next_frame->width(), next_frame->height())) {
532 DLOG(ERROR) << "Failed to copy buffer";
533 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
534 return;
537 frame->AddDestructionObserver(media::BindToCurrentLoop(
538 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
539 video_encoder_->Encode(frame, next_frame_keyframe);
540 input_buffers_free_.pop_back();
541 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
544 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
545 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
546 DCHECK(thread_checker_.CalledOnValidThread());
547 DCHECK_GE(index, 0);
548 DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
549 input_buffers_free_.push_back(index);
550 if (input_next_frame_)
551 EncodeOneFrame();
554 void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
555 int32_t* retval) {
556 DCHECK(thread_checker_.CalledOnValidThread());
557 DCHECK(!async_waiter_);
558 DCHECK(!async_retval_);
559 async_waiter_ = waiter;
560 async_retval_ = retval;
563 void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
564 DCHECK(thread_checker_.CalledOnValidThread());
565 *async_retval_ = retval;
566 async_waiter_->Signal();
567 async_retval_ = NULL;
568 async_waiter_ = NULL;
571 #undef NOTIFY_ERROR
573 ////////////////////////////////////////////////////////////////////////////////
575 // RTCVideoEncoder
577 ////////////////////////////////////////////////////////////////////////////////
579 RTCVideoEncoder::RTCVideoEncoder(
580 webrtc::VideoCodecType type,
581 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
582 : video_codec_type_(type),
583 gpu_factories_(gpu_factories),
584 encoded_image_callback_(NULL),
585 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
586 weak_factory_(this) {
587 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type;
590 RTCVideoEncoder::~RTCVideoEncoder() {
591 DVLOG(3) << "~RTCVideoEncoder";
592 DCHECK(thread_checker_.CalledOnValidThread());
593 Release();
594 DCHECK(!impl_.get());
597 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
598 int32_t number_of_cores,
599 size_t max_payload_size) {
600 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
601 << ", width=" << codec_settings->width
602 << ", height=" << codec_settings->height
603 << ", startBitrate=" << codec_settings->startBitrate;
604 DCHECK(thread_checker_.CalledOnValidThread());
605 DCHECK(!impl_.get());
607 const media::VideoCodecProfile profile =
608 WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings);
610 weak_factory_.InvalidateWeakPtrs();
611 impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
612 base::WaitableEvent initialization_waiter(true, false);
613 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
614 gpu_factories_->GetTaskRunner()->PostTask(
615 FROM_HERE,
616 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
617 impl_,
618 gfx::Size(codec_settings->width, codec_settings->height),
619 codec_settings->startBitrate,
620 profile,
621 &initialization_waiter,
622 &initialization_retval));
624 // webrtc::VideoEncoder expects this call to be synchronous.
625 initialization_waiter.Wait();
626 RecordInitEncodeUMA(initialization_retval, profile);
627 return initialization_retval;
630 int32_t RTCVideoEncoder::Encode(
631 const webrtc::VideoFrame& input_image,
632 const webrtc::CodecSpecificInfo* codec_specific_info,
633 const std::vector<webrtc::VideoFrameType>* frame_types) {
634 DVLOG(3) << "Encode()";
635 if (!impl_.get()) {
636 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
637 return impl_status_;
640 const bool want_key_frame = frame_types && frame_types->size() &&
641 frame_types->front() == webrtc::kKeyFrame;
642 base::WaitableEvent encode_waiter(true, false);
643 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
644 gpu_factories_->GetTaskRunner()->PostTask(
645 FROM_HERE,
646 base::Bind(&RTCVideoEncoder::Impl::Enqueue,
647 impl_,
648 &input_image,
649 want_key_frame,
650 &encode_waiter,
651 &encode_retval));
653 // webrtc::VideoEncoder expects this call to be synchronous.
654 encode_waiter.Wait();
655 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
656 return encode_retval;
659 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
660 webrtc::EncodedImageCallback* callback) {
661 DVLOG(3) << "RegisterEncodeCompleteCallback()";
662 DCHECK(thread_checker_.CalledOnValidThread());
663 if (!impl_.get()) {
664 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
665 return impl_status_;
668 encoded_image_callback_ = callback;
669 return WEBRTC_VIDEO_CODEC_OK;
672 int32_t RTCVideoEncoder::Release() {
673 DVLOG(3) << "Release()";
674 DCHECK(thread_checker_.CalledOnValidThread());
676 if (impl_.get()) {
677 gpu_factories_->GetTaskRunner()->PostTask(
678 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
679 impl_ = NULL;
680 weak_factory_.InvalidateWeakPtrs();
681 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
683 return WEBRTC_VIDEO_CODEC_OK;
686 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss,
687 int64_t rtt) {
688 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
689 << ", rtt=" << rtt;
690 // Ignored.
691 return WEBRTC_VIDEO_CODEC_OK;
694 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
695 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
696 << ", frame_rate=" << frame_rate;
697 if (!impl_.get()) {
698 DVLOG(3) << "SetRates(): returning " << impl_status_;
699 return impl_status_;
702 gpu_factories_->GetTaskRunner()->PostTask(
703 FROM_HERE,
704 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
705 impl_,
706 new_bit_rate,
707 frame_rate));
708 return WEBRTC_VIDEO_CODEC_OK;
711 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
712 int32 bitstream_buffer_id,
713 uint16 picture_id) {
714 DCHECK(thread_checker_.CalledOnValidThread());
715 DVLOG(3) << "ReturnEncodedImage(): "
716 << "bitstream_buffer_id=" << bitstream_buffer_id
717 << ", picture_id=" << picture_id;
719 if (!encoded_image_callback_)
720 return;
722 webrtc::RTPFragmentationHeader header;
723 memset(&header, 0, sizeof(header));
724 switch (video_codec_type_) {
725 case webrtc::kVideoCodecVP8:
726 // Generate a header describing a single fragment.
727 header.VerifyAndAllocateFragmentationHeader(1);
728 header.fragmentationOffset[0] = 0;
729 header.fragmentationLength[0] = image->_length;
730 header.fragmentationPlType[0] = 0;
731 header.fragmentationTimeDiff[0] = 0;
732 break;
733 case webrtc::kVideoCodecH264:
734 if (!GetRTPFragmentationHeaderH264(
735 &header, image->_buffer, image->_length)) {
736 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264";
737 NotifyError(WEBRTC_VIDEO_CODEC_ERROR);
738 return;
740 break;
741 default:
742 NOTREACHED() << "Invalid video codec type";
743 return;
746 webrtc::CodecSpecificInfo info;
747 memset(&info, 0, sizeof(info));
748 info.codecType = video_codec_type_;
749 if (video_codec_type_ == webrtc::kVideoCodecVP8) {
750 info.codecSpecific.VP8.pictureId = picture_id;
751 info.codecSpecific.VP8.tl0PicIdx = -1;
752 info.codecSpecific.VP8.keyIdx = -1;
755 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
756 if (retval < 0) {
757 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
758 << retval;
761 // The call through webrtc::EncodedImageCallback is synchronous, so we can
762 // immediately recycle the output buffer back to the Impl.
763 gpu_factories_->GetTaskRunner()->PostTask(
764 FROM_HERE,
765 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
766 impl_,
767 bitstream_buffer_id));
770 void RTCVideoEncoder::NotifyError(int32_t error) {
771 DCHECK(thread_checker_.CalledOnValidThread());
772 DVLOG(1) << "NotifyError(): error=" << error;
774 impl_status_ = error;
775 gpu_factories_->GetTaskRunner()->PostTask(
776 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
777 impl_ = NULL;
780 void RTCVideoEncoder::RecordInitEncodeUMA(
781 int32_t init_retval, media::VideoCodecProfile profile) {
782 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
783 init_retval == WEBRTC_VIDEO_CODEC_OK);
784 if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
785 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
786 profile,
787 media::VIDEO_CODEC_PROFILE_MAX + 1);
791 } // namespace content