// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/cast/sender/external_video_encoder.h"

#include <cmath>
#include <list>

#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "media/base/video_util.h"
#include "media/cast/cast_defines.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/net/cast_transport_config.h"
namespace {

static const size_t kOutputBufferCount = 3;

void LogFrameEncodedEvent(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    base::TimeTicks event_time,
    media::cast::RtpTimestamp rtp_timestamp,
    uint32 frame_id) {
  cast_environment->Logging()->InsertFrameEvent(
      event_time, media::cast::FRAME_ENCODED, media::cast::VIDEO_EVENT,
      rtp_timestamp, frame_id);
}

}  // namespace

namespace media {
namespace cast {
// Container for the associated data of a video frame being processed.
struct InProgressFrameEncode {
  // The source content to encode.
  const scoped_refptr<VideoFrame> video_frame;

  // The reference time for this frame.
  const base::TimeTicks reference_time;

  // The callback to run when the result is ready.
  const VideoEncoder::FrameEncodedCallback frame_encoded_callback;

  // The target encode bit rate.
  const int target_bit_rate;

  // The real-world encode start time.  This is used to compute the encoded
  // frame's |deadline_utilization| and so it uses the real-world clock instead
  // of the CastEnvironment clock, the latter of which might be simulated.
  const base::TimeTicks start_time;

  InProgressFrameEncode(const scoped_refptr<VideoFrame>& v_frame,
                        base::TimeTicks r_time,
                        VideoEncoder::FrameEncodedCallback callback,
                        int bit_rate)
      : video_frame(v_frame),
        reference_time(r_time),
        frame_encoded_callback(callback),
        target_bit_rate(bit_rate),
        start_time(base::TimeTicks::Now()) {}
};
// Owns a VideoEncodeAccelerator instance and provides the necessary adapters
// to encode media::VideoFrames and emit media::cast::EncodedFrames.  All
// methods must be called on the thread associated with the given
// SingleThreadTaskRunner, except for the task_runner() accessor.
class ExternalVideoEncoder::VEAClientImpl
    : public VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<VEAClientImpl> {
 public:
  VEAClientImpl(
      const scoped_refptr<CastEnvironment>& cast_environment,
      const scoped_refptr<base::SingleThreadTaskRunner>& encoder_task_runner,
      scoped_ptr<media::VideoEncodeAccelerator> vea,
      int max_frame_rate,
      const StatusChangeCallback& status_change_cb,
      const CreateVideoEncodeMemoryCallback& create_video_encode_memory_cb)
      : cast_environment_(cast_environment),
        task_runner_(encoder_task_runner),
        max_frame_rate_(max_frame_rate),
        status_change_cb_(status_change_cb),
        create_video_encode_memory_cb_(create_video_encode_memory_cb),
        video_encode_accelerator_(vea.Pass()),
        encoder_active_(false),
        next_frame_id_(0u),
        key_frame_encountered_(false),
        requested_bit_rate_(-1) {
  }

  base::SingleThreadTaskRunner* task_runner() const {
    return task_runner_.get();
  }
  void Initialize(const gfx::Size& frame_size,
                  VideoCodecProfile codec_profile,
                  int start_bit_rate,
                  uint32 first_frame_id) {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    requested_bit_rate_ = start_bit_rate;
    encoder_active_ = video_encode_accelerator_->Initialize(
        media::PIXEL_FORMAT_I420, frame_size, codec_profile, start_bit_rate,
        this);
    next_frame_id_ = first_frame_id;

    UMA_HISTOGRAM_BOOLEAN("Cast.Sender.VideoEncodeAcceleratorInitializeSuccess",
                          encoder_active_);

    cast_environment_->PostTask(
        CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(status_change_cb_,
                   encoder_active_ ? STATUS_INITIALIZED :
                                     STATUS_CODEC_INIT_FAILED));
  }
  void SetBitRate(int bit_rate) {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    requested_bit_rate_ = bit_rate;
    video_encode_accelerator_->RequestEncodingParametersChange(bit_rate,
                                                               max_frame_rate_);
  }
  void EncodeVideoFrame(
      const scoped_refptr<media::VideoFrame>& video_frame,
      const base::TimeTicks& reference_time,
      bool key_frame_requested,
      const VideoEncoder::FrameEncodedCallback& frame_encoded_callback) {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    if (!encoder_active_)
      return;

    in_progress_frame_encodes_.push_back(InProgressFrameEncode(
        video_frame, reference_time, frame_encoded_callback,
        requested_bit_rate_));

    // BitstreamBufferReady will be called once the encoder is done.
    video_encode_accelerator_->Encode(video_frame, key_frame_requested);
  }
  void NotifyError(VideoEncodeAccelerator::Error error) final {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    DCHECK(error != VideoEncodeAccelerator::kInvalidArgumentError &&
           error != VideoEncodeAccelerator::kIllegalStateError);

    encoder_active_ = false;

    cast_environment_->PostTask(
        CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(status_change_cb_, STATUS_CODEC_RUNTIME_ERROR));

    // TODO(miu): Force-flush all |in_progress_frame_encodes_| immediately so
    // pending frames do not become stuck, freezing VideoSender.
  }
  // Called to allocate the input and output buffers.
  void RequireBitstreamBuffers(unsigned int input_count,
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) final {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    // TODO(miu): Investigate why we are ignoring |input_count| (4) and instead
    // using |kOutputBufferCount| (3) here.
    for (size_t j = 0; j < kOutputBufferCount; ++j) {
      create_video_encode_memory_cb_.Run(
          output_buffer_size,
          base::Bind(&VEAClientImpl::OnCreateSharedMemory, this));
    }
  }
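
  // Note on buffer flow: once all |kOutputBufferCount| shared memory buffers
  // have arrived in OnReceivedSharedMemory(), they are handed to the VEA via
  // UseOutputBitstreamBuffer().  Each buffer is consumed in
  // BitstreamBufferReady() below and then immediately re-queued with the VEA,
  // so encoding proceeds with this fixed pool of buffers.
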
  // Encoder has encoded a frame and it's available in one of the output
  // buffers.  Package the result in a media::cast::EncodedFrame and post it
  // to the Cast MAIN thread via the supplied callback.
  void BitstreamBufferReady(int32 bitstream_buffer_id,
                            size_t payload_size,
                            bool key_frame) final {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());
    if (bitstream_buffer_id < 0 ||
        bitstream_buffer_id >= static_cast<int32>(output_buffers_.size())) {
      VLOG(1) << "BitstreamBufferReady(): invalid bitstream_buffer_id="
              << bitstream_buffer_id;
      NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
    if (payload_size > output_buffer->mapped_size()) {
      VLOG(1) << "BitstreamBufferReady(): invalid payload_size = "
              << payload_size;
      NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    if (key_frame)
      key_frame_encountered_ = true;
    if (!key_frame_encountered_) {
      // Do not send video until we have encountered the first key frame.
      // Save the bitstream buffer in |stream_header_| to be sent later along
      // with the first key frame.
      //
      // TODO(miu): Should |stream_header_| be an std::ostringstream for
      // performance reasons?
      stream_header_.append(static_cast<const char*>(output_buffer->memory()),
                            payload_size);
    } else if (!in_progress_frame_encodes_.empty()) {
      const InProgressFrameEncode& request = in_progress_frame_encodes_.front();

      scoped_ptr<SenderEncodedFrame> encoded_frame(new SenderEncodedFrame());
      encoded_frame->dependency = key_frame ? EncodedFrame::KEY :
                                              EncodedFrame::DEPENDENT;
      encoded_frame->frame_id = next_frame_id_++;
      if (key_frame)
        encoded_frame->referenced_frame_id = encoded_frame->frame_id;
      else
        encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1;
      encoded_frame->rtp_timestamp = TimeDeltaToRtpDelta(
          request.video_frame->timestamp(), kVideoFrequency);
      encoded_frame->reference_time = request.reference_time;
      if (!stream_header_.empty()) {
        encoded_frame->data = stream_header_;
        stream_header_.clear();
      }
      encoded_frame->data.append(
          static_cast<const char*>(output_buffer->memory()), payload_size);

      // If FRAME_DURATION metadata was provided in the source VideoFrame,
      // compute the utilization metrics.
      base::TimeDelta frame_duration;
      if (request.video_frame->metadata()->GetTimeDelta(
              media::VideoFrameMetadata::FRAME_DURATION, &frame_duration) &&
          frame_duration > base::TimeDelta()) {
        // Compute deadline utilization as the real-world time elapsed divided
        // by the frame duration.
        const base::TimeDelta processing_time =
            base::TimeTicks::Now() - request.start_time;
        encoded_frame->deadline_utilization =
            processing_time.InSecondsF() / frame_duration.InSecondsF();

        // See vp8_encoder.cc for an explanation of this math.  Here, we are
        // computing a substitute value for |quantizer| using the
        // QuantizerEstimator.
        const double actual_bit_rate =
            encoded_frame->data.size() * 8.0 / frame_duration.InSecondsF();
        DCHECK_GT(request.target_bit_rate, 0);
        const double bitrate_utilization =
            actual_bit_rate / request.target_bit_rate;
        const double quantizer =
            (encoded_frame->dependency == EncodedFrame::KEY) ?
                quantizer_estimator_.EstimateForKeyFrame(*request.video_frame) :
                quantizer_estimator_.EstimateForDeltaFrame(*request.video_frame);
        if (quantizer != QuantizerEstimator::NO_RESULT) {
          encoded_frame->lossy_utilization = bitrate_utilization *
              (quantizer / QuantizerEstimator::MAX_VP8_QUANTIZER);
        }
      } else {
        quantizer_estimator_.Reset();
      }
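      // Example of the above math: for 30 FPS content (a frame_duration of
      // ~33.3 ms), an encode that took 10 ms of wall-clock time yields a
      // |deadline_utilization| of ~0.3.  If the encoded output consumed twice
      // the target bit rate over that duration, |bitrate_utilization| is 2.0
      // and |lossy_utilization| is 2.0 scaled by the estimated fraction of the
      // maximum VP8 quantizer that was in effect.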

      cast_environment_->PostTask(
          CastEnvironment::MAIN,
          FROM_HERE,
          base::Bind(&LogFrameEncodedEvent,
                     cast_environment_,
                     cast_environment_->Clock()->NowTicks(),
                     encoded_frame->rtp_timestamp,
                     encoded_frame->frame_id));

      cast_environment_->PostTask(
          CastEnvironment::MAIN,
          FROM_HERE,
          base::Bind(request.frame_encoded_callback,
                     base::Passed(&encoded_frame)));

      in_progress_frame_encodes_.pop_front();
    } else {
      VLOG(1) << "BitstreamBufferReady(): no encoded frame data available";
    }

    // We need to re-add the output buffer to the encoder after we are done
    // with it.
    video_encode_accelerator_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
  }
 private:
  friend class base::RefCountedThreadSafe<VEAClientImpl>;

  ~VEAClientImpl() final {
    // According to the media::VideoEncodeAccelerator interface, Destroy()
    // should be called instead of invoking its private destructor.
    task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&media::VideoEncodeAccelerator::Destroy,
                   base::Unretained(video_encode_accelerator_.release())));
  }
  // Note: This method can be called on any thread.
  void OnCreateSharedMemory(scoped_ptr<base::SharedMemory> memory) {
    task_runner_->PostTask(FROM_HERE,
                           base::Bind(&VEAClientImpl::OnReceivedSharedMemory,
                                      this,
                                      base::Passed(&memory)));
  }

  void OnReceivedSharedMemory(scoped_ptr<base::SharedMemory> memory) {
    DCHECK(task_runner_->RunsTasksOnCurrentThread());

    output_buffers_.push_back(memory.Pass());

    // Wait until all requested buffers are received.
    if (output_buffers_.size() < kOutputBufferCount)
      return;

    // Immediately provide all output buffers to the VEA.
    for (size_t i = 0; i < output_buffers_.size(); ++i) {
      video_encode_accelerator_->UseOutputBitstreamBuffer(
          media::BitstreamBuffer(static_cast<int32>(i),
                                 output_buffers_[i]->handle(),
                                 output_buffers_[i]->mapped_size()));
    }
  }

  const scoped_refptr<CastEnvironment> cast_environment_;
  const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
  const int max_frame_rate_;
  const StatusChangeCallback status_change_cb_;  // Must be run on MAIN thread.
  const CreateVideoEncodeMemoryCallback create_video_encode_memory_cb_;
  scoped_ptr<media::VideoEncodeAccelerator> video_encode_accelerator_;
  bool encoder_active_;
  uint32 next_frame_id_;
  bool key_frame_encountered_;
  std::string stream_header_;

  // Shared memory buffers for output with the VideoEncodeAccelerator.
  ScopedVector<base::SharedMemory> output_buffers_;

  // FIFO list of frames submitted for encode; results from the VEA are
  // matched to these in order.
  std::list<InProgressFrameEncode> in_progress_frame_encodes_;

  // The requested encode bit rate for the next frame.
  int requested_bit_rate_;

  // Used to compute utilization metrics for each frame.
  QuantizerEstimator quantizer_estimator_;

  DISALLOW_COPY_AND_ASSIGN(VEAClientImpl);
};
// static
bool ExternalVideoEncoder::IsSupported(const VideoSenderConfig& video_config) {
  if (video_config.codec != CODEC_VIDEO_VP8 &&
      video_config.codec != CODEC_VIDEO_H264)
    return false;

  // TODO(miu): "Layering hooks" are needed to be able to query outside of
  // libmedia, to determine whether the system provides a hardware encoder.
  // For now, assume that this was already checked by this point.
  // http://crbug.com/454029
  return video_config.use_external_encoder;
}
ExternalVideoEncoder::ExternalVideoEncoder(
    const scoped_refptr<CastEnvironment>& cast_environment,
    const VideoSenderConfig& video_config,
    const gfx::Size& frame_size,
    uint32 first_frame_id,
    const StatusChangeCallback& status_change_cb,
    const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
    const CreateVideoEncodeMemoryCallback& create_video_encode_memory_cb)
    : cast_environment_(cast_environment),
      create_video_encode_memory_cb_(create_video_encode_memory_cb),
      frame_size_(frame_size),
      bit_rate_(video_config.start_bitrate),
      key_frame_requested_(false),
      weak_factory_(this) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
  DCHECK_GT(video_config.max_frame_rate, 0);
  DCHECK(!frame_size_.IsEmpty());
  DCHECK(!status_change_cb.is_null());
  DCHECK(!create_vea_cb.is_null());
  DCHECK(!create_video_encode_memory_cb_.is_null());
  DCHECK_GT(bit_rate_, 0);

  create_vea_cb.Run(
      base::Bind(&ExternalVideoEncoder::OnCreateVideoEncodeAccelerator,
                 weak_factory_.GetWeakPtr(),
                 video_config,
                 first_frame_id,
                 status_change_cb));
}
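
// Note: Until OnCreateVideoEncodeAccelerator() runs and successfully creates
// |client_|, EncodeVideoFrame() below returns false and frames are simply not
// encoded.  The eventual success or failure of encoder creation is reported
// asynchronously through |status_change_cb|.
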
ExternalVideoEncoder::~ExternalVideoEncoder() {
}
bool ExternalVideoEncoder::EncodeVideoFrame(
    const scoped_refptr<media::VideoFrame>& video_frame,
    const base::TimeTicks& reference_time,
    const FrameEncodedCallback& frame_encoded_callback) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
  DCHECK(!frame_encoded_callback.is_null());

  if (!client_ || video_frame->visible_rect().size() != frame_size_)
    return false;

  client_->task_runner()->PostTask(FROM_HERE,
      base::Bind(&VEAClientImpl::EncodeVideoFrame,
                 client_,
                 video_frame,
                 reference_time,
                 key_frame_requested_,
                 frame_encoded_callback));
  key_frame_requested_ = false;
  return true;
}
void ExternalVideoEncoder::SetBitRate(int new_bit_rate) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
  DCHECK_GT(new_bit_rate, 0);

  bit_rate_ = new_bit_rate;
  if (!client_)
    return;
  client_->task_runner()->PostTask(
      FROM_HERE, base::Bind(&VEAClientImpl::SetBitRate, client_, bit_rate_));
}
void ExternalVideoEncoder::GenerateKeyFrame() {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
  key_frame_requested_ = true;
}
void ExternalVideoEncoder::LatestFrameIdToReference(uint32 /*frame_id*/) {
  // Do nothing.  Not supported.
}
void ExternalVideoEncoder::OnCreateVideoEncodeAccelerator(
    const VideoSenderConfig& video_config,
    uint32 first_frame_id,
    const StatusChangeCallback& status_change_cb,
    scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner,
    scoped_ptr<media::VideoEncodeAccelerator> vea) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));

  // The callback will be invoked with null pointers in the case where the
  // system does not support or lacks the resources to provide GPU-accelerated
  // video encoding.
  if (!encoder_task_runner || !vea) {
    cast_environment_->PostTask(
        CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(status_change_cb, STATUS_CODEC_INIT_FAILED));
    return;
  }

  VideoCodecProfile codec_profile;
  switch (video_config.codec) {
    case CODEC_VIDEO_VP8:
      codec_profile = media::VP8PROFILE_ANY;
      break;
    case CODEC_VIDEO_H264:
      codec_profile = media::H264PROFILE_MAIN;
      break;
    case CODEC_VIDEO_FAKE:
      NOTREACHED() << "Fake software video encoder cannot be external";
      // ...flow through to next case...
    default:
      cast_environment_->PostTask(
          CastEnvironment::MAIN,
          FROM_HERE,
          base::Bind(status_change_cb, STATUS_UNSUPPORTED_CODEC));
      return;
  }

  client_ = new VEAClientImpl(cast_environment_,
                              encoder_task_runner,
                              vea.Pass(),
                              video_config.max_frame_rate,
                              status_change_cb,
                              create_video_encode_memory_cb_);
  client_->task_runner()->PostTask(FROM_HERE,
      base::Bind(&VEAClientImpl::Initialize,
                 client_,
                 frame_size_,
                 codec_profile,
                 bit_rate_,
                 first_frame_id));
}
SizeAdaptableExternalVideoEncoder::SizeAdaptableExternalVideoEncoder(
    const scoped_refptr<CastEnvironment>& cast_environment,
    const VideoSenderConfig& video_config,
    const StatusChangeCallback& status_change_cb,
    const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
    const CreateVideoEncodeMemoryCallback& create_video_encode_memory_cb)
    : SizeAdaptableVideoEncoderBase(cast_environment,
                                    video_config,
                                    status_change_cb),
      create_vea_cb_(create_vea_cb),
      create_video_encode_memory_cb_(create_video_encode_memory_cb) {}

SizeAdaptableExternalVideoEncoder::~SizeAdaptableExternalVideoEncoder() {}
scoped_ptr<VideoEncoder> SizeAdaptableExternalVideoEncoder::CreateEncoder() {
  return scoped_ptr<VideoEncoder>(new ExternalVideoEncoder(
      cast_environment(),
      video_config(),
      frame_size(),
      last_frame_id() + 1,
      CreateEncoderStatusChangeCallback(),
      create_vea_cb_,
      create_video_encode_memory_cb_));
}
QuantizerEstimator::QuantizerEstimator() {}

QuantizerEstimator::~QuantizerEstimator() {}

void QuantizerEstimator::Reset() {
  last_frame_pixel_buffer_.reset();
}
double QuantizerEstimator::EstimateForKeyFrame(const VideoFrame& frame) {
  if (!CanExamineFrame(frame))
    return NO_RESULT;

  // If the size of the frame is different from the last frame, allocate a new
  // buffer.  The buffer only needs to be a fraction of the size of the entire
  // frame, since the entropy analysis only examines a subset of each frame.
  const gfx::Size size = frame.visible_rect().size();
  const int rows_in_subset =
      std::max(1, size.height() * FRAME_SAMPLING_PERCENT / 100);
  if (last_frame_size_ != size || !last_frame_pixel_buffer_) {
    last_frame_pixel_buffer_.reset(new uint8[size.width() * rows_in_subset]);
    last_frame_size_ = size;
  }

  // Compute a histogram where each bucket represents the number of times two
  // neighboring pixels were different by a specific amount.  511 buckets are
  // needed, one for each integer in the range [-255,255].
  int histogram[511];
  memset(histogram, 0, sizeof(histogram));
  const int row_skip = size.height() / rows_in_subset;
  int y = 0;
  for (int i = 0; i < rows_in_subset; ++i, y += row_skip) {
    const uint8* const row_begin = frame.visible_data(VideoFrame::kYPlane) +
        y * frame.stride(VideoFrame::kYPlane);
    const uint8* const row_end = row_begin + size.width();
    int left_hand_pixel_value = static_cast<int>(*row_begin);
    for (const uint8* p = row_begin + 1; p < row_end; ++p) {
      const int right_hand_pixel_value = static_cast<int>(*p);
      const int difference = right_hand_pixel_value - left_hand_pixel_value;
      const int histogram_index = difference + 255;
      ++histogram[histogram_index];
      left_hand_pixel_value = right_hand_pixel_value;  // For next iteration.
    }

    // Copy the row of pixels into the buffer.  This will be used when
    // generating histograms for future delta frames.
    memcpy(last_frame_pixel_buffer_.get() + i * size.width(),
           row_begin,
           size.width());
  }

  // Estimate a quantizer value depending on the difference data in the
  // histogram and return it.
  const int num_samples = (size.width() - 1) * rows_in_subset;
  return ToQuantizerEstimate(ComputeEntropyFromHistogram(
      histogram, arraysize(histogram), num_samples));
}
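
// Note: The key frame estimate above uses (size.width() - 1) * rows_in_subset
// samples because each sample is the difference between horizontally adjacent
// pixels within a row; the delta frame estimate below differences co-located
// pixels against the previous frame, so it uses size.width() * rows_in_subset
// samples.
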
double QuantizerEstimator::EstimateForDeltaFrame(const VideoFrame& frame) {
  if (!CanExamineFrame(frame))
    return NO_RESULT;

  // If the size of the |frame| has changed, no difference can be examined.
  // In this case, process this frame as if it were a key frame.
  const gfx::Size size = frame.visible_rect().size();
  if (last_frame_size_ != size || !last_frame_pixel_buffer_)
    return EstimateForKeyFrame(frame);
  const int rows_in_subset =
      std::max(1, size.height() * FRAME_SAMPLING_PERCENT / 100);

  // Compute a histogram where each bucket represents the number of times the
  // same pixel in this frame versus the last frame was different by a specific
  // amount.  511 buckets are needed, one for each integer in the range
  // [-255,255].
  int histogram[511];
  memset(histogram, 0, sizeof(histogram));
  const int row_skip = size.height() / rows_in_subset;
  int y = 0;
  for (int i = 0; i < rows_in_subset; ++i, y += row_skip) {
    const uint8* const row_begin = frame.visible_data(VideoFrame::kYPlane) +
        y * frame.stride(VideoFrame::kYPlane);
    const uint8* const row_end = row_begin + size.width();
    uint8* const last_frame_row_begin =
        last_frame_pixel_buffer_.get() + i * size.width();
    for (const uint8* p = row_begin, *q = last_frame_row_begin; p < row_end;
         ++p, ++q) {
      const int difference = static_cast<int>(*p) - static_cast<int>(*q);
      const int histogram_index = difference + 255;
      ++histogram[histogram_index];
    }

    // Copy the row of pixels into the buffer.  This will be used when
    // generating histograms for future delta frames.
    memcpy(last_frame_row_begin, row_begin, size.width());
  }

  // Estimate a quantizer value depending on the difference data in the
  // histogram and return it.
  const int num_samples = size.width() * rows_in_subset;
  return ToQuantizerEstimate(ComputeEntropyFromHistogram(
      histogram, arraysize(histogram), num_samples));
}
bool QuantizerEstimator::CanExamineFrame(const VideoFrame& frame) {
  DCHECK_EQ(8, VideoFrame::PlaneHorizontalBitsPerPixel(frame.format(),
                                                       VideoFrame::kYPlane));
  return media::IsYuvPlanar(frame.format()) &&
      !frame.visible_rect().IsEmpty();
}
double QuantizerEstimator::ComputeEntropyFromHistogram(const int* histogram,
                                                       size_t num_buckets,
                                                       int num_samples) {
  DCHECK_LT(0, num_samples);
  double entropy = 0.0;
  for (size_t i = 0; i < num_buckets; ++i) {
    const double probability = static_cast<double>(histogram[i]) / num_samples;
    if (probability > 0.0)
      entropy = entropy - probability * log2(probability);
  }
  return entropy;
}
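
// Note: ComputeEntropyFromHistogram() returns the Shannon entropy
// H = -sum(p[i] * log2(p[i])) of the pixel-difference distribution, in bits
// per sample.  With 511 buckets, its value lies in [0, log2(511)], i.e.
// roughly 0 to 9 bits.
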
double QuantizerEstimator::ToQuantizerEstimate(double shannon_entropy) {
  DCHECK_GE(shannon_entropy, 0.0);

  // This math is based on an analysis of data produced by running a wide range
  // of mirroring content in a Cast streaming session on a Chromebook Pixel
  // (2013 edition).  The output from the Pixel's built-in hardware encoder was
  // compared to an identically-configured software implementation (libvpx)
  // running alongside.  Based on an analysis of the data, the following linear
  // mapping seems to produce reasonable VP8 quantizer values from the
  // |shannon_entropy| values.
  //
  // TODO(miu): Confirm whether this model and value work well on other
  // platforms.
  const double kEntropyAtMaxQuantizer = 7.5;
  const double slope =
      (MAX_VP8_QUANTIZER - MIN_VP8_QUANTIZER) / kEntropyAtMaxQuantizer;
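  // Example of the mapping: shannon_entropy == 0 yields MIN_VP8_QUANTIZER,
  // shannon_entropy == kEntropyAtMaxQuantizer yields MAX_VP8_QUANTIZER, and
  // values in between are interpolated linearly (e.g. an entropy of half of
  // kEntropyAtMaxQuantizer yields the midpoint of the quantizer range).
  // Anything above kEntropyAtMaxQuantizer is clamped by the std::min() below.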
  const double quantizer = std::min<double>(
      MAX_VP8_QUANTIZER, MIN_VP8_QUANTIZER + slope * shannon_entropy);
  return quantizer;
}

}  // namespace cast
}  // namespace media