// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/metrics/histogram.h"
#include "base/rand_util.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "base/thread_task_runner_handle.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/filters/h264_parser.h"
#include "media/renderers/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/webrtc/system_wrappers/interface/tick_util.h"

#define NOTIFY_ERROR(x)                               \
  do {                                                \
    DLOG(ERROR) << "calling NotifyError(): " << x;    \
    NotifyError(x);                                   \
  } while (0)

namespace content {

// Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
// media::VideoCodecProfile.
media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
    webrtc::VideoCodecType type,
    const webrtc::VideoCodec* codec_settings) {
  DCHECK_EQ(type, codec_settings->codecType);
  switch (type) {
    case webrtc::kVideoCodecVP8:
      return media::VP8PROFILE_ANY;
    case webrtc::kVideoCodecH264: {
      switch (codec_settings->codecSpecific.H264.profile) {
        case webrtc::kProfileBase:
          return media::H264PROFILE_BASELINE;
        case webrtc::kProfileMain:
          return media::H264PROFILE_MAIN;
        default:
          break;
      }
      break;
    }
    default:
      break;
  }
  NOTREACHED() << "Unrecognized video codec type";
  return media::VIDEO_CODEC_PROFILE_UNKNOWN;
}

// Populates struct webrtc::RTPFragmentationHeader for H264 codec.
// Each entry specifies the offset and length (excluding start code) of a NALU.
// Returns true if successful.
bool GetRTPFragmentationHeaderH264(webrtc::RTPFragmentationHeader* header,
                                   const uint8_t* data, uint32_t length) {
  media::H264Parser parser;
  parser.SetStream(data, length);

  std::vector<media::H264NALU> nalu_vector;
  while (true) {
    media::H264NALU nalu;
    const media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result == media::H264Parser::kOk) {
      nalu_vector.push_back(nalu);
    } else if (result == media::H264Parser::kEOStream) {
      break;
    } else {
      DLOG(ERROR) << "Unexpected H264 parser result";
      return false;
    }
  }

  header->VerifyAndAllocateFragmentationHeader(nalu_vector.size());
  for (size_t i = 0; i < nalu_vector.size(); ++i) {
    header->fragmentationOffset[i] = nalu_vector[i].data - data;
    header->fragmentationLength[i] = nalu_vector[i].size;
    header->fragmentationPlType[i] = 0;
    header->fragmentationTimeDiff[i] = 0;
  }
  return true;
}
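
// For example, a key frame laid out as [start code][SPS][start code][PPS]
// [start code][IDR slice] yields three entries above: each offset points just
// past its start code and each length covers only the NALU payload, matching
// the "excluding start code" contract documented for this helper.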

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding.  It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread.  Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread).  The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
       const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it.  Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);

  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  void RequireBitstreamBuffers(unsigned int input_count,
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) override;
  void BitstreamBufferReady(int32 bitstream_buffer_id,
                            size_t payload_size,
                            bool key_frame) override;
  void NotifyError(media::VideoEncodeAccelerator::Error error) override;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding.  |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  // Used to check that all methods after construction run on a single thread.
  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // The number of output buffers ready to be filled with output from the
  // encoder.
  int output_buffers_free_count_;

  // 15 bits running index of the VP8 frames.  See VP8 RTP spec for details.
  uint16_t picture_id_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0) {
  thread_checker_.DetachFromThread();
  // Picture ID should start on a random number.
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}
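
// Note: DetachFromThread() above is what allows this Impl to be constructed on
// the encoder thread while |thread_checker_| re-binds to the first thread that
// subsequently calls CalledOnValidThread(), i.e. the media thread on which all
// other Impl methods run.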

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_,
                                  profile, bitrate * 1000, this)) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
}
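
// Bitrate bookkeeping: |bitrate| arrives from WebRTC in kilobits/sec, so e.g.
// a requested 300 kbps becomes 300 * 1000 = 300000 bits/sec in the
// Initialize() call above; the kuint32max / 1000 guard rejects values whose
// conversion would overflow a uint32.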

void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  // If there are no free input and output buffers, drop the frame to avoid a
  // deadlock. If there is a free input buffer, EncodeOneFrame will run and
  // unblock Encode(). If there are no free input buffers but there is a free
  // output buffer, EncodeFrameFinished will be called later to unblock
  // Encode().
  //
  // The caller of Encode() holds a webrtc lock. The deadlock happens when:
  // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
  // (2) There are no free input buffers and they cannot be freed because
  //     the encoder has no output buffers.
  // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
  //     on the libjingle worker thread to be run. But the worker thread is
  //     waiting for the same webrtc lock held by the caller of Encode().
  //
  // Dropping a frame is fine. The encoder has been filled with all input
  // buffers. Returning an error in Encode() is not fatal and WebRTC will just
  // continue. If this is a key frame, WebRTC will request a key frame again.
  // Besides, WebRTC will drop a frame if Encode() blocks too long.
  if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
    DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
    output_buffers_free_count_++;
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  video_encoder_.reset();
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  input_frame_coded_size_ = input_coded_size;

  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    scoped_ptr<base::SharedMemory> shm =
        gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
            media::PIXEL_FORMAT_I420, input_coded_size));
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm.release());
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    scoped_ptr<base::SharedMemory> shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm.release());
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
    output_buffers_free_count_++;
  }
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
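
// Buffer accounting: if the VEA requests |input_count| = 4 input buffers, the
// first loop above allocates 4 + kInputBufferExtraCount = 5 of them, while
// exactly kOutputBufferCount = 3 output buffers are created and handed to the
// VEA right away, so |output_buffers_free_count_| starts at 3.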

void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  output_buffers_free_count_--;

  // Use webrtc timestamps to ensure correct RTP sender behavior.
  // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
  const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();

  // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
  const int64 capture_time_ms = capture_time_us / 1000;
  const uint32_t rtp_timestamp =
      static_cast<uint32_t>(capture_time_us * 90 / 1000);

  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  image->_timeStamp = rtp_timestamp;
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_,
                 base::Passed(&image), bitstream_buffer_id, picture_id_));
  // Picture ID must wrap after reaching the maximum.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
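
// Timestamp math: the RTP clock for video runs at 90 kHz (90 ticks per
// millisecond), so e.g. a |capture_time_us| of 1,000,000 microseconds yields
// capture_time_ms = 1000 and rtp_timestamp = 90000 via the
// capture_time_us * 90 / 1000 expression above.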

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  video_encoder_.reset();

  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
  // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
  // Encode() gets destroyed early.  Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::VideoFrame* next_frame = input_next_frame_;
  const bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  const int index = input_buffers_free_.back();
  scoped_refptr<media::VideoFrame> frame;
  if (next_frame->native_handle()) {
    frame = static_cast<media::VideoFrame*>(next_frame->native_handle());
  } else {
    base::SharedMemory* input_buffer = input_buffers_[index];
    frame = media::VideoFrame::WrapExternalSharedMemory(
        media::PIXEL_FORMAT_I420, input_frame_coded_size_,
        gfx::Rect(input_visible_size_), input_visible_size_,
        reinterpret_cast<uint8*>(input_buffer->memory()),
        input_buffer->mapped_size(), input_buffer->handle(), 0,
        base::TimeDelta());
    if (!frame.get()) {
      DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    // Do a strided copy of the input frame to match the input requirements for
    // the encoder.
    // TODO(sheu): support zero-copy from WebRTC.  http://crbug.com/269312
    if (libyuv::I420Copy(next_frame->buffer(webrtc::kYPlane),
                         next_frame->stride(webrtc::kYPlane),
                         next_frame->buffer(webrtc::kUPlane),
                         next_frame->stride(webrtc::kUPlane),
                         next_frame->buffer(webrtc::kVPlane),
                         next_frame->stride(webrtc::kVPlane),
                         frame->data(media::VideoFrame::kYPlane),
                         frame->stride(media::VideoFrame::kYPlane),
                         frame->data(media::VideoFrame::kUPlane),
                         frame->stride(media::VideoFrame::kUPlane),
                         frame->data(media::VideoFrame::kVPlane),
                         frame->stride(media::VideoFrame::kVPlane),
                         next_frame->width(), next_frame->height())) {
      DLOG(ERROR) << "Failed to copy buffer";
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
  }
  frame->AddDestructionObserver(media::BindToCurrentLoop(
      base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_factory_(this) {
  DVLOG(1) << "RTCVideoEncoder(): codec type=" << type;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DVLOG(3) << "~RTCVideoEncoder";
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_.get());
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    size_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_.get());

  const media::VideoCodecProfile profile =
      WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings);

  weak_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 profile,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval, profile);
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  if (!impl_.get()) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  const bool want_key_frame = frame_types && frame_types->size() &&
                              frame_types->front() == webrtc::kKeyFrame;
  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 want_key_frame,
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_.get()) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl_.get()) {
    gpu_factories_->GetTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    weak_factory_.InvalidateWeakPtrs();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss,
                                              int64_t rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  if (!impl_.get()) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_, new_bit_rate, frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id,
                                         uint16_t picture_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
           << "bitstream_buffer_id=" << bitstream_buffer_id
           << ", picture_id=" << picture_id;

  if (!encoded_image_callback_)
    return;

  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  switch (video_codec_type_) {
    case webrtc::kVideoCodecVP8:
      // Generate a header describing a single fragment.
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationOffset[0] = 0;
      header.fragmentationLength[0] = image->_length;
      header.fragmentationPlType[0] = 0;
      header.fragmentationTimeDiff[0] = 0;
      break;
    case webrtc::kVideoCodecH264:
      if (!GetRTPFragmentationHeaderH264(
              &header, image->_buffer, image->_length)) {
        DLOG(ERROR) << "Failed to get RTP fragmentation header for H264";
        NotifyError(WEBRTC_VIDEO_CODEC_ERROR);
        return;
      }
      break;
    default:
      NOTREACHED() << "Invalid video codec type";
      return;
  }

  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = picture_id;
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

void RTCVideoEncoder::RecordInitEncodeUMA(
    int32_t init_retval, media::VideoCodecProfile profile) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              profile,
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
}

}  // namespace content