// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/vaapi_video_encode_accelerator.h"

#include "base/bind.h"
#include "base/callback.h"
#include "base/logging.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/numerics/safe_conversions.h"
#include "content/common/gpu/media/h264_dpb.h"
#include "media/base/bind_to_current_loop.h"
#include "third_party/libva/va/va_enc_h264.h"

#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "
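
// NOTIFY_ERROR below is wrapped in do { ... } while (0) so that it expands to
// a single statement and composes safely with unbraced if/else, e.g.:
//   if (failed)
//     NOTIFY_ERROR(kPlatformFailureError, "something went wrong");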
#define NOTIFY_ERROR(error, msg)                           \
  do {                                                     \
    SetState(kError);                                      \
    LOG(ERROR) << msg;                                     \
    LOG(ERROR) << "Calling NotifyError(" << error << ")";  \
    NotifyError(error);                                    \
  } while (0)

namespace content {

namespace {
// Need 2 surfaces for each frame: one for input data and one for
// reconstructed picture, which is later used for reference.
const size_t kMinSurfacesToEncode = 2;

// Subjectively chosen.
const size_t kNumInputBuffers = 4;
const size_t kMaxNumReferenceFrames = 4;

// We need up to kMaxNumReferenceFrames surfaces for reference, plus one
// for input and one for encode (which will be added to the set of reference
// frames for subsequent frames). Actual execution of HW encode is done
// in parallel, and we want to process more frames in the meantime.
// To have kNumInputBuffers in flight, we need a full set of reference +
// encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and
// (kNumInputBuffers - 1) more sets of kMinSurfacesToEncode surfaces for the
// remaining frames in flight.
const size_t kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode +
                            kMinSurfacesToEncode * (kNumInputBuffers - 1);
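// With the values above this works out to 4 + 2 + 2 * (4 - 1) = 12 surfaces.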

// An IDR every 2048 frames, an I frame every 256, and no B frames.
// We choose the IDR period to equal MaxFrameNum, so it must be a power of 2.
const int kIDRPeriod = 2048;
const int kIPeriod = 256;
const int kIPPeriod = 1;
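// kIPPeriod == 1 means I and P frames follow each other directly with no
// B frames in between, matching "no B frames" in the comment above.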

const int kDefaultFramerate = 30;

// HRD parameters (ch. E.2.2 in spec).
const int kBitRateScale = 0;  // bit_rate_scale for SPS HRD parameters.
const int kCPBSizeScale = 0;  // cpb_size_scale for SPS HRD parameters.

const int kDefaultQP = 26;
// All Intel codecs can do at least 4.1.
const int kDefaultLevelIDC = 41;
const int kChromaFormatIDC = 1;  // 4:2:0

// Arbitrarily chosen bitrate window size for rate control, in ms.
const int kCPBWindowSizeMs = 1500;

// UMA errors that the VaapiVideoEncodeAccelerator class reports.
enum VAVEAEncoderFailure {
  VAAPI_ERROR = 0,
  VAVEA_ENCODER_FAILURES_MAX,
};

}  // namespace

// Round |value| up to |alignment|, which must be a power of 2.
static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) {
  // Check that |alignment| is a power of 2.
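  // For a power of two, alignment + (alignment - 1) and
  // alignment | (alignment - 1) are both 2 * alignment - 1; for any other
  // nonzero value the sum exceeds the OR (e.g. 6 + 5 == 11 but 6 | 5 == 7).
  // The rounding below then gives, e.g., RoundUpToPowerOf2(1080, 16) == 1088.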
  DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
  return ((value + (alignment - 1)) & ~(alignment - 1));
}

static void ReportToUMA(VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VAVEA_ENCODER_FAILURES_MAX);
}

struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<media::VideoFrame> frame;
  const bool force_keyframe;
};

struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};

media::VideoEncodeAccelerator::SupportedProfiles
VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
  return VaapiWrapper::GetSupportedEncodeProfiles();
}
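
// Returns log2(x) for |x| that must be a power of 2; for example,
// Log2OfPowerOf2(2048) == 11. Used below to derive log2_max_frame_num_minus4
// and log2_max_pic_order_cnt_lsb_minus4 from the IDR period.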
static unsigned int Log2OfPowerOf2(unsigned int x) {
  CHECK_GT(x, 0u);
  DCHECK_EQ(x & (x - 1), 0u);

  int log = 0;
  while (x > 1) {
    x >>= 1;
    ++log;
  }
  return log;
}

VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator()
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      state_(kUninitialized),
      frame_num_(0),
      idr_pic_id_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  DVLOGF(4);
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}

VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}

bool VaapiVideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK_EQ(state_, kUninitialized);

  DVLOGF(1) << "Initializing VAVEA, input_format: "
            << media::VideoFrame::FormatToString(format)
            << ", input_visible_size: " << input_visible_size.ToString()
            << ", output_profile: " << output_profile
            << ", initial_bitrate: " << initial_bitrate;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  if (output_profile < media::H264PROFILE_BASELINE ||
      output_profile > media::H264PROFILE_MAIN) {
    DVLOGF(1) << "Unsupported output profile: " << output_profile;
    return false;
  }

  if (format != media::VideoFrame::I420) {
    DVLOGF(1) << "Unsupported input format: "
              << media::VideoFrame::FormatToString(format);
    return false;
  }

  profile_ = output_profile;
  visible_size_ = input_visible_size;
  // 4:2:0 format has to be 2-aligned.
  DCHECK_EQ(visible_size_.width() % 2, 0);
  DCHECK_EQ(visible_size_.height() % 2, 0);
  coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
                          RoundUpToPowerOf2(visible_size_.height(), 16));
  mb_width_ = coded_size_.width() / 16;
  mb_height_ = coded_size_.height() / 16;
  output_buffer_byte_size_ = coded_size_.GetArea();
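  // For example, a 1920x1080 visible size becomes a 1920x1088 coded size,
  // i.e. 120x68 macroblocks and a 1920 * 1088 = 2088960-byte output buffer.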

  UpdateRates(initial_bitrate, kDefaultFramerate);

  vaapi_wrapper_ =
      VaapiWrapper::CreateForVideoCodec(VaapiWrapper::kEncode, output_profile,
                                        base::Bind(&ReportToUMA, VAAPI_ERROR));
  if (!vaapi_wrapper_.get()) {
    DVLOGF(1) << "Failed initializing VAAPI for profile " << output_profile;
    return false;
  }

  if (!encoder_thread_.Start()) {
    LOG(ERROR) << "Failed to start encoder thread";
    return false;
  }
  encoder_thread_proxy_ = encoder_thread_.message_loop_proxy();

  // Finish the remaining initialization on the encoder thread.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
                 base::Unretained(this)));

  return true;
}

void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);
  DVLOGF(4);

  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
                 base::Unretained(this)));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  UpdateSPS();
  GeneratePackedSPS();

  UpdatePPS();
  GeneratePackedPPS();

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}

void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}

void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
  current_pic_ = new H264Picture();

  // If the current picture is an IDR picture, frame_num shall be equal to 0.
  if (force_keyframe)
    frame_num_ = 0;

  current_pic_->frame_num = frame_num_++;
  frame_num_ %= idr_period_;

  if (current_pic_->frame_num == 0) {
    current_pic_->idr = true;
    // H264 spec mandates idr_pic_id to differ between two consecutive IDRs.
    idr_pic_id_ ^= 1;
    ref_pic_list0_.clear();
  }

  if (current_pic_->frame_num % i_period_ == 0)
    current_pic_->type = media::H264SliceHeader::kISlice;
  else
    current_pic_->type = media::H264SliceHeader::kPSlice;

  if (current_pic_->type != media::H264SliceHeader::kBSlice)
    current_pic_->ref = true;

  current_pic_->pic_order_cnt = current_pic_->frame_num * 2;
  current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt;
  current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt;
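  // With frame-only coding and no B frames, display order equals decode
  // order, so POC can simply be 2 * frame_num (POC values are conventionally
  // spaced two apart per frame, one for each field).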

  current_encode_job_->keyframe = current_pic_->idr;

  DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type
            << (force_keyframe ? " (forced keyframe)" : "")
            << " frame_num: " << current_pic_->frame_num
            << " POC: " << current_pic_->pic_order_cnt;
}

void VaapiVideoEncodeAccelerator::EndFrame() {
  DCHECK(current_pic_);
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (current_pic_->ref)
    ref_pic_list0_.push_front(current_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release()));
}

static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}

bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  DCHECK(current_pic_);
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

#define SPS_TO_SP(a) seq_param.a = current_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (current_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = current_pic_->top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = current_pic_->bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = current_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
  pic_param.frame_num = current_pic_->frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = current_pic_->idr;
  pic_param.pic_fields.bits.reference_pic_flag = current_pic_->ref;
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = current_pic_->type;
  slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = idr_pic_id_;
  slice_param.pic_order_cnt_lsb = current_pic_->pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;
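  // In libva's rate control parameters, bits_per_second is the maximum
  // bitrate and target_percentage expresses the target as a percentage of it,
  // so 90 here aims at roughly 90% of bitrate_; window_size is the averaging
  // window in milliseconds (kCPBWindowSizeMs).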

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}

bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  DCHECK(current_pic_);
  if (current_pic_->type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}

bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DCHECK(current_pic_);
  DVLOGF(3) << "Encoding frame_num: " << current_pic_->frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      current_encode_job_->input_surface->id());
}

bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, current_encode_job_->input_surface->id());
}

void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding)
    return;

  if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
    return;

  linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
  available_bitstream_buffers_.pop();

  uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory());

  linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
  submitted_encode_jobs_.pop();

  size_t data_size = 0;
  if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
          encode_job->coded_buffer,
          encode_job->input_surface->id(),
          target_data,
          buffer->size,
          &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  DVLOGF(3) << "Returning bitstream buffer "
            << (encode_job->keyframe ? "(keyframe)" : "")
            << " id: " << buffer->id << " size: " << data_size;

  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::BitstreamBufferReady,
                                                 client_,
                                                 buffer->id,
                                                 data_size,
                                                 encode_job->keyframe));
}

void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}

bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!current_encode_job_);
  current_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &current_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  current_encode_job_->input_surface = new VASurface(
      available_va_surface_ids_.back(), coded_size_, va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  current_encode_job_->recon_surface = new VASurface(
      available_va_surface_ids_.back(), coded_size_, va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  current_encode_job_->reference_surfaces = ref_pic_list0_;

  return true;
}

void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}

void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}

void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}

void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}

void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}

void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0u);
  DCHECK_NE(framerate, 0u);
  bitrate_ = bitrate;
  framerate_ = framerate;
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
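  // The CPB is sized to hold kCPBWindowSizeMs worth of stream, e.g. a 1 Mbps
  // target gives a 1000000 * 1500 / 1000 = 1500000-bit CPB.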
}

void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  // Workaround: the WebRTC video encoder may temporarily pass a zero bitrate
  // and framerate during initial setup, but VA-API does not accept zero
  // values, so clamp both to at least 1.
  // TODO: This code is shared with v4l2_video_encode_accelerator.cc; perhaps
  // it could be moved up into RTCVideoEncoder.
  if (bitrate < 1)
    bitrate = 1;
  if (framerate < 1)
    framerate = 1;

  if (bitrate_ == bitrate && framerate_ == framerate)
    return;

  UpdateRates(bitrate, framerate);

  UpdateSPS();
  GeneratePackedSPS();

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}

void VaapiVideoEncodeAccelerator::Destroy() {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Can't call client anymore after Destroy() returns.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  // Early-exit encoder tasks if they are running and join the thread.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    encoder_thread_.Stop();
  }

  delete this;
}

void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  SetState(kError);
}

void VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&current_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      current_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      current_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return;
  }

  current_sps_.level_idc = kDefaultLevelIDC;
  current_sps_.seq_parameter_set_id = 0;
  current_sps_.chroma_format_idc = kChromaFormatIDC;

  DCHECK_GE(idr_period_, 1u << 4);
  current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  current_sps_.pic_order_cnt_type = 0;
  current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
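  // With the default kIDRPeriod of 2048 this gives log2_max_frame_num_minus4
  // = 11 - 4 = 7 and log2_max_pic_order_cnt_lsb_minus4 = 12 - 4 = 8, i.e.
  // MaxFrameNum == 2048 and MaxPicOrderCntLsb == 4096.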
  current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  current_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0u);
  DCHECK_GT(mb_height_, 0u);
  current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(current_sps_.frame_mbs_only_flag);
  current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    current_sps_.frame_cropping_flag = true;
    DCHECK(!current_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(current_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
    current_sps_.frame_crop_left_offset = 0;
    current_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    current_sps_.frame_crop_top_offset = 0;
    current_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
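    // For example, 1920x1080 visible in a 1920x1088 coded frame yields
    // frame_crop_bottom_offset = (1088 - 1080) / 2 = 4 and no other cropping.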
  }

  current_sps_.vui_parameters_present_flag = true;
  current_sps_.timing_info_present_flag = true;
  current_sps_.num_units_in_tick = 1;
  current_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  current_sps_.fixed_frame_rate_flag = true;

  current_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  current_sps_.cpb_cnt_minus1 = 0;
  current_sps_.bit_rate_scale = kBitRateScale;
  current_sps_.cpb_size_scale = kCPBSizeScale;
  current_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  current_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  current_sps_.cbr_flag[0] = true;
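  // Per spec E.2.2, the decoder reconstructs
  //   BitRate[0] = (bit_rate_value_minus1[0] + 1) << (6 + bit_rate_scale) and
  //   CpbSize[0] = (cpb_size_value_minus1[0] + 1) << (4 + cpb_size_scale),
  // so the shifts above (with the constant terms presumably being 6 and 4)
  // just encode bitrate_ and cpb_size_ in those units.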
  current_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  current_sps_.low_delay_hrd_flag = false;
}

void VaapiVideoEncodeAccelerator::GeneratePackedSPS() {
  packed_sps_.Reset();

  packed_sps_.BeginNALU(media::H264NALU::kSPS, 3);

  packed_sps_.AppendBits(8, current_sps_.profile_idc);
  packed_sps_.AppendBool(current_sps_.constraint_set0_flag);
  packed_sps_.AppendBool(current_sps_.constraint_set1_flag);
  packed_sps_.AppendBool(current_sps_.constraint_set2_flag);
  packed_sps_.AppendBool(current_sps_.constraint_set3_flag);
  packed_sps_.AppendBool(current_sps_.constraint_set4_flag);
  packed_sps_.AppendBool(current_sps_.constraint_set5_flag);
  packed_sps_.AppendBits(2, 0);  // reserved_zero_2bits
  packed_sps_.AppendBits(8, current_sps_.level_idc);
  packed_sps_.AppendUE(current_sps_.seq_parameter_set_id);

  if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) {
    packed_sps_.AppendUE(current_sps_.chroma_format_idc);
    if (current_sps_.chroma_format_idc == 3)
      packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag);
    packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8);
    packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8);
    packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag);
    packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag);
    CHECK(!current_sps_.seq_scaling_matrix_present_flag);
  }

  packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4);
  packed_sps_.AppendUE(current_sps_.pic_order_cnt_type);
  if (current_sps_.pic_order_cnt_type == 0)
    packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4);
  else if (current_sps_.pic_order_cnt_type == 1) {
    // pic_order_cnt_type == 1 is never configured by this encoder.
    NOTREACHED();
  }

  packed_sps_.AppendUE(current_sps_.max_num_ref_frames);
  packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag);
  packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1);
  packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1);

  packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag);
  if (!current_sps_.frame_mbs_only_flag)
    packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag);

  packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag);

  packed_sps_.AppendBool(current_sps_.frame_cropping_flag);
  if (current_sps_.frame_cropping_flag) {
    packed_sps_.AppendUE(current_sps_.frame_crop_left_offset);
    packed_sps_.AppendUE(current_sps_.frame_crop_right_offset);
    packed_sps_.AppendUE(current_sps_.frame_crop_top_offset);
    packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset);
  }

  packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag);
  if (current_sps_.vui_parameters_present_flag) {
    packed_sps_.AppendBool(false);  // aspect_ratio_info_present_flag
    packed_sps_.AppendBool(false);  // overscan_info_present_flag
    packed_sps_.AppendBool(false);  // video_signal_type_present_flag
    packed_sps_.AppendBool(false);  // chroma_loc_info_present_flag

    packed_sps_.AppendBool(current_sps_.timing_info_present_flag);
    if (current_sps_.timing_info_present_flag) {
      packed_sps_.AppendBits(32, current_sps_.num_units_in_tick);
      packed_sps_.AppendBits(32, current_sps_.time_scale);
      packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag);
    }

    packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag);
    if (current_sps_.nal_hrd_parameters_present_flag) {
      packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1);
      packed_sps_.AppendBits(4, current_sps_.bit_rate_scale);
      packed_sps_.AppendBits(4, current_sps_.cpb_size_scale);
      CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1),
               arraysize(current_sps_.bit_rate_value_minus1));
      for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) {
        packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]);
        packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]);
        packed_sps_.AppendBool(current_sps_.cbr_flag[i]);
      }
      packed_sps_.AppendBits(
          5, current_sps_.initial_cpb_removal_delay_length_minus_1);
      packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1);
      packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1);
      packed_sps_.AppendBits(5, current_sps_.time_offset_length);
    }

    packed_sps_.AppendBool(false);  // vcl_hrd_parameters_flag
    if (current_sps_.nal_hrd_parameters_present_flag)
      packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag);

    packed_sps_.AppendBool(false);  // pic_struct_present_flag
    packed_sps_.AppendBool(true);  // bitstream_restriction_flag

    packed_sps_.AppendBool(false);  // motion_vectors_over_pic_boundaries_flag
    packed_sps_.AppendUE(2);  // max_bytes_per_pic_denom
    packed_sps_.AppendUE(1);  // max_bits_per_mb_denom
    packed_sps_.AppendUE(16);  // log2_max_mv_length_horizontal
    packed_sps_.AppendUE(16);  // log2_max_mv_length_vertical

    // Explicitly set max_num_reorder_frames to 0 to allow the decoder to
    // output pictures early.
    packed_sps_.AppendUE(0);  // max_num_reorder_frames

    // The value of max_dec_frame_buffering shall be greater than or equal to
    // max_num_ref_frames.
    const unsigned int max_dec_frame_buffering =
        current_sps_.max_num_ref_frames;
    packed_sps_.AppendUE(max_dec_frame_buffering);
  }

  packed_sps_.FinishNALU();
}

void VaapiVideoEncodeAccelerator::UpdatePPS() {
  memset(&current_pps_, 0, sizeof(media::H264PPS));

  current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id;
  current_pps_.pic_parameter_set_id = 0;

  current_pps_.entropy_coding_mode_flag =
      current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain;

  CHECK_GT(max_ref_idx_l0_size_, 0u);
  current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1;
  current_pps_.num_ref_idx_l1_default_active_minus1 = 0;
  DCHECK_LE(qp_, 51u);
  current_pps_.pic_init_qp_minus26 = qp_ - 26;
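  // With the default QP of 26 (kDefaultQP), pic_init_qp_minus26 is 0.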
  current_pps_.deblocking_filter_control_present_flag = true;
  current_pps_.transform_8x8_mode_flag =
      (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh);
}

void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
  packed_pps_.Reset();

  packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);

  packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
  packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
  packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
  packed_pps_.AppendBool(
      current_pps_.bottom_field_pic_order_in_frame_present_flag);
  CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
  packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);

  packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
  packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);

  packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
  packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);

  packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
  packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
  packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);

  packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
  packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
  packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);

  packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
  packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
  DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
  packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);

  packed_pps_.FinishNALU();
}

void VaapiVideoEncodeAccelerator::SetState(State state) {
  // Only touch state on encoder thread, unless it's not running.
  if (encoder_thread_.IsRunning() &&
      !encoder_thread_proxy_->BelongsToCurrentThread()) {
    encoder_thread_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::SetState,
                   base::Unretained(this),
                   state));
    return;
  }

  DVLOGF(1) << "setting state to: " << state;
  state_ = state;
}

void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
  if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_.reset();
  }
}

VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
    : coded_buffer(VA_INVALID_ID), keyframe(false) {
}

VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() {
}

}  // namespace content