[chromium-blink-merge.git] / media/cast/sender/video_sender.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/cast/sender/video_sender.h"

#include <algorithm>
#include <cmath>
#include <cstring>

#include "base/bind.h"
#include "base/logging.h"
#include "base/trace_event/trace_event.h"
#include "media/cast/cast_defines.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/sender/performance_metrics_overlay.h"
#include "media/cast/sender/video_encoder.h"

namespace media {
namespace cast {

namespace {

// The following two constants are used to adjust the target
// playout delay (when allowed). They were calculated using
// a combination of cast_benchmark runs and manual testing.

// This is how many round trips we think we need on the network.
const int kRoundTripsNeeded = 4;

// This is an estimate of all the constant time needed independent of
// network quality (e.g., additional time that accounts for encode and decode
// time).
const int kConstantTimeMs = 75;
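
// For example, with a measured round-trip time of 30 ms, the adaptive target
// playout delay computed in InsertRawVideoFrame() works out to
// 4 * 30 + 75 = 195 ms (before being capped at the configured maximum).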

// The target maximum utilization of the encoder and network resources. This is
// used to attenuate the actual measured utilization values in order to provide
// "breathing room" (i.e., to ensure there will be sufficient CPU and bandwidth
// available to handle the occasional more-complex frames).
const int kTargetUtilizationPercentage = 75;
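
// For example, a measured utilization of 0.6 on a frame is reported to the
// frame source as 0.6 / 0.75 = 0.8 (see OnEncodedVideoFrame()).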

// Extract capture begin/end timestamps from |video_frame|'s metadata and log
// them.
void LogVideoCaptureTimestamps(const CastEnvironment& cast_environment,
                               const media::VideoFrame& video_frame,
                               RtpTimestamp rtp_timestamp) {
  base::TimeTicks capture_begin_time;
  base::TimeTicks capture_end_time;
  if (!video_frame.metadata()->GetTimeTicks(
          media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, &capture_begin_time) ||
      !video_frame.metadata()->GetTimeTicks(
          media::VideoFrameMetadata::CAPTURE_END_TIME, &capture_end_time)) {
    // The frame capture timestamps were not provided by the video capture
    // source. Simply log the events as happening right now.
    capture_begin_time = capture_end_time =
        cast_environment.Clock()->NowTicks();
  }
  cast_environment.Logging()->InsertFrameEvent(
      capture_begin_time, FRAME_CAPTURE_BEGIN, VIDEO_EVENT, rtp_timestamp,
      kFrameIdUnknown);
  cast_environment.Logging()->InsertCapturedVideoFrameEvent(
      capture_end_time, rtp_timestamp, video_frame.visible_rect().width(),
      video_frame.visible_rect().height());
}

}  // namespace

// Note, we use a fixed bitrate value when an external video encoder is used.
// Some hardware encoders show bad behavior if we set the bitrate too
// frequently, e.g. quality drops, not abiding by the target bitrate, etc.
// See details: crbug.com/392086.
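// For example, with min_bitrate = 2000000 and max_bitrate = 5000000, the fixed
// congestion control below holds the encoder at (2000000 + 5000000) / 2 =
// 3500000 bps for the whole session.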
VideoSender::VideoSender(
    scoped_refptr<CastEnvironment> cast_environment,
    const VideoSenderConfig& video_config,
    const StatusChangeCallback& status_change_cb,
    const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
    const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
    CastTransportSender* const transport_sender,
    const PlayoutDelayChangeCB& playout_delay_change_cb)
    : FrameSender(
          cast_environment,
          false,
          transport_sender,
          kVideoFrequency,
          video_config.ssrc,
          video_config.max_frame_rate,
          video_config.min_playout_delay,
          video_config.max_playout_delay,
          video_config.use_external_encoder
              ? NewFixedCongestionControl(
                    (video_config.min_bitrate + video_config.max_bitrate) / 2)
              : NewAdaptiveCongestionControl(cast_environment->Clock(),
                                             video_config.max_bitrate,
                                             video_config.min_bitrate,
                                             video_config.max_frame_rate)),
      frames_in_encoder_(0),
      last_bitrate_(0),
      playout_delay_change_cb_(playout_delay_change_cb),
      last_reported_deadline_utilization_(-1.0),
      last_reported_lossy_utilization_(-1.0),
      weak_factory_(this) {
  video_encoder_ = VideoEncoder::Create(
      cast_environment_,
      video_config,
      status_change_cb,
      create_vea_cb,
      create_video_encode_mem_cb);
  if (!video_encoder_) {
    cast_environment_->PostTask(
        CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(status_change_cb, STATUS_UNSUPPORTED_CODEC));
  }

  media::cast::CastTransportRtpConfig transport_config;
  transport_config.ssrc = video_config.ssrc;
  transport_config.feedback_ssrc = video_config.receiver_ssrc;
  transport_config.rtp_payload_type = video_config.rtp_payload_type;
  transport_config.aes_key = video_config.aes_key;
  transport_config.aes_iv_mask = video_config.aes_iv_mask;

  transport_sender->InitializeVideo(
      transport_config,
      base::Bind(&VideoSender::OnReceivedCastFeedback,
                 weak_factory_.GetWeakPtr()),
      base::Bind(&VideoSender::OnMeasuredRoundTripTime,
                 weak_factory_.GetWeakPtr()));
}

VideoSender::~VideoSender() {
}

void VideoSender::InsertRawVideoFrame(
    const scoped_refptr<media::VideoFrame>& video_frame,
    const base::TimeTicks& reference_time) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));

  if (!video_encoder_) {
    NOTREACHED();
    return;
  }
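
  // The RTP timestamp below is derived from the frame's media timestamp via
  // TimeDeltaToRtpDelta() at kVideoFrequency ticks per second. (For the
  // conventional 90 kHz video RTP clock, a 33 ms frame step advances the RTP
  // timestamp by roughly 3000 ticks.)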
  const RtpTimestamp rtp_timestamp =
      TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency);
  LogVideoCaptureTimestamps(*cast_environment_, *video_frame, rtp_timestamp);

  // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
  TRACE_EVENT_INSTANT2(
      "cast_perf_test", "InsertRawVideoFrame",
      TRACE_EVENT_SCOPE_THREAD,
      "timestamp", reference_time.ToInternalValue(),
      "rtp_timestamp", rtp_timestamp);

  // Drop the frame if either its RTP or reference timestamp is not an increase
  // over the last frame's. This protects: 1) the duration calculations that
  // assume timestamps are monotonically non-decreasing, and 2) assumptions made
  // deeper in the implementation where each frame's RTP timestamp needs to be
  // unique.
  if (!last_enqueued_frame_reference_time_.is_null() &&
      (!IsNewerRtpTimestamp(rtp_timestamp,
                            last_enqueued_frame_rtp_timestamp_) ||
       reference_time <= last_enqueued_frame_reference_time_)) {
    VLOG(1) << "Dropping video frame: RTP or reference time did not increase.";
    TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop",
                         TRACE_EVENT_SCOPE_THREAD,
                         "rtp_timestamp", rtp_timestamp,
                         "reason", "time did not increase");
    return;
  }

  // Two video frames are needed to compute the exact media duration added by
  // the next frame. If there are no frames in the encoder, compute a guess
  // based on the configured |max_frame_rate_|. Any error introduced by this
  // guess will be eliminated when |duration_in_encoder_| is updated in
  // OnEncodedVideoFrame().
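  // For example, with a configured max frame rate of 30 FPS, the guess for the
  // first frame would be 1 / 30 s (about 33 ms).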
  const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ?
      reference_time - last_enqueued_frame_reference_time_ :
      base::TimeDelta::FromSecondsD(1.0 / max_frame_rate_);

  if (ShouldDropNextFrame(duration_added_by_next_frame)) {
    base::TimeDelta new_target_delay = std::min(
        current_round_trip_time_ * kRoundTripsNeeded +
            base::TimeDelta::FromMilliseconds(kConstantTimeMs),
        max_playout_delay_);
    if (new_target_delay > target_playout_delay_) {
      VLOG(1) << "New target delay: " << new_target_delay.InMilliseconds();
      playout_delay_change_cb_.Run(new_target_delay);
    }

    // Some encoder implementations have a frame window for analysis. Since we
    // are dropping this frame, unless we instruct the encoder to flush all the
    // frames that have been enqueued for encoding, frames_in_encoder_ and
    // last_enqueued_frame_reference_time_ will never be updated and we will
    // drop every subsequent frame for the rest of the session.
    video_encoder_->EmitFrames();

    TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop",
                         TRACE_EVENT_SCOPE_THREAD,
                         "rtp_timestamp", rtp_timestamp,
                         "reason", "too much in flight");
    return;
  }

  if (video_frame->visible_rect().IsEmpty()) {
    VLOG(1) << "Rejecting empty video frame.";
    return;
  }

  const int bitrate = congestion_control_->GetBitrate(
      reference_time + target_playout_delay_, target_playout_delay_,
      GetMaximumTargetBitrateForFrame(*video_frame));
  if (bitrate != last_bitrate_) {
    video_encoder_->SetBitRate(bitrate);
    last_bitrate_ = bitrate;
  }

  TRACE_COUNTER_ID1("cast.stream", "Video Target Bitrate", this, bitrate);

  MaybeRenderPerformanceMetricsOverlay(bitrate,
                                       frames_in_encoder_ + 1,
                                       last_reported_deadline_utilization_,
                                       last_reported_lossy_utilization_,
                                       video_frame.get());

  if (video_encoder_->EncodeVideoFrame(
          video_frame,
          reference_time,
          base::Bind(&VideoSender::OnEncodedVideoFrame,
                     weak_factory_.GetWeakPtr(),
                     video_frame,
                     bitrate))) {
    TRACE_EVENT_ASYNC_BEGIN1("cast.stream", "Video Encode", video_frame.get(),
                             "rtp_timestamp", rtp_timestamp);
    frames_in_encoder_++;
    duration_in_encoder_ += duration_added_by_next_frame;
    last_enqueued_frame_rtp_timestamp_ = rtp_timestamp;
    last_enqueued_frame_reference_time_ = reference_time;
  } else {
    VLOG(1) << "Encoder rejected a frame. Skipping...";
    TRACE_EVENT_INSTANT1("cast.stream", "Video Encode Reject",
                         TRACE_EVENT_SCOPE_THREAD,
                         "rtp_timestamp", rtp_timestamp);
  }
}

scoped_ptr<VideoFrameFactory> VideoSender::CreateVideoFrameFactory() {
  return video_encoder_ ? video_encoder_->CreateVideoFrameFactory() : nullptr;
}

int VideoSender::GetNumberOfFramesInEncoder() const {
  return frames_in_encoder_;
}

base::TimeDelta VideoSender::GetInFlightMediaDuration() const {
  if (GetUnacknowledgedFrameCount() > 0) {
    const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
    return last_enqueued_frame_reference_time_ -
        GetRecordedReferenceTime(oldest_unacked_frame_id);
  } else {
    return duration_in_encoder_;
  }
}

void VideoSender::OnAck(uint32 frame_id) {
  video_encoder_->LatestFrameIdToReference(frame_id);
}

// static
int VideoSender::GetMaximumTargetBitrateForFrame(
    const media::VideoFrame& frame) {
  enum {
    // Constants used to linearly translate between lines of resolution and a
    // maximum target bitrate. These values are based on observed quality
    // trade-offs over a wide range of content. The math will use these values
    // to compute a bitrate of 2 Mbps for 360 lines of resolution and 4 Mbps for
    // 720 lines.
    BITRATE_FOR_HIGH_RESOLUTION = 4000000,
    BITRATE_FOR_STANDARD_RESOLUTION = 2000000,
    HIGH_RESOLUTION_LINES = 720,
    STANDARD_RESOLUTION_LINES = 360,

    // The smallest maximum target bitrate, regardless of what the math says.
    MAX_BITRATE_LOWER_BOUND = 1000000,

    // Constants used to boost the result for high frame rate content.
    HIGH_FRAME_RATE_THRESHOLD_USEC = 25000,  // 40 FPS
    HIGH_FRAME_RATE_BOOST_NUMERATOR = 3,
    HIGH_FRAME_RATE_BOOST_DENOMINATOR = 2,
  };

  // Determine the approximate height of a 16:9 frame having the same area
  // (number of pixels) as |frame|.
  const gfx::Size& resolution = frame.visible_rect().size();
  const int lines_of_resolution =
      ((resolution.width() * 9) == (resolution.height() * 16)) ?
      resolution.height() :
      static_cast<int>(sqrt(resolution.GetArea() * 9.0 / 16.0));
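  // For example, a 1280x720 frame is already 16:9 and maps directly to 720
  // lines, while a 640x480 (4:3) frame maps to sqrt(640 * 480 * 9 / 16) ~= 415
  // equivalent lines.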

  // Linearly translate from |lines_of_resolution| to a maximum target bitrate.
  int64 result = lines_of_resolution - STANDARD_RESOLUTION_LINES;
  result *= BITRATE_FOR_HIGH_RESOLUTION - BITRATE_FOR_STANDARD_RESOLUTION;
  result /= HIGH_RESOLUTION_LINES - STANDARD_RESOLUTION_LINES;
  result += BITRATE_FOR_STANDARD_RESOLUTION;
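  // For example, 360 lines yields 2 Mbps, 720 lines yields 4 Mbps, and 1080
  // lines extrapolates to (1080 - 360) * 2000000 / 360 + 2000000 = 6 Mbps.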

  // Boost the result for high frame rate content.
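  // For example, 60 FPS content has a frame duration of about 16667
  // microseconds, which is below the 25000 microsecond threshold, so the
  // result computed above is multiplied by 3/2.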
  base::TimeDelta frame_duration;
  if (frame.metadata()->GetTimeDelta(media::VideoFrameMetadata::FRAME_DURATION,
                                     &frame_duration) &&
      frame_duration > base::TimeDelta() &&
      frame_duration.InMicroseconds() <= HIGH_FRAME_RATE_THRESHOLD_USEC) {
    result *= HIGH_FRAME_RATE_BOOST_NUMERATOR;
    result /= HIGH_FRAME_RATE_BOOST_DENOMINATOR;
  }

  // Return a lower-bounded result.
  return std::max<int>(result, MAX_BITRATE_LOWER_BOUND);
}

void VideoSender::OnEncodedVideoFrame(
    const scoped_refptr<media::VideoFrame>& video_frame,
    int encoder_bitrate,
    scoped_ptr<SenderEncodedFrame> encoded_frame) {
  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));

  frames_in_encoder_--;
  DCHECK_GE(frames_in_encoder_, 0);
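
  // |duration_in_encoder_| is recomputed as the span of reference time between
  // the frame that just finished encoding and the most recently enqueued frame.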
  duration_in_encoder_ =
      last_enqueued_frame_reference_time_ - encoded_frame->reference_time;

  last_reported_deadline_utilization_ = encoded_frame->deadline_utilization;
  last_reported_lossy_utilization_ = encoded_frame->lossy_utilization;

  TRACE_EVENT_ASYNC_END2("cast.stream", "Video Encode", video_frame.get(),
                         "deadline_utilization",
                         last_reported_deadline_utilization_,
                         "lossy_utilization", last_reported_lossy_utilization_);

  // Report the resource utilization for processing this frame. Take the
  // greater of the two utilization values and attenuate them such that the
  // target utilization is reported as the maximum sustainable amount.
  const double attenuated_utilization =
      std::max(last_reported_deadline_utilization_,
               last_reported_lossy_utilization_) /
          (kTargetUtilizationPercentage / 100.0);
  if (attenuated_utilization >= 0.0) {
    // Key frames are artificially capped to 1.0 because their actual
    // utilization is atypical compared to the other frames in the stream, and
    // this can misguide the producer of the input video frames.
    video_frame->metadata()->SetDouble(
        media::VideoFrameMetadata::RESOURCE_UTILIZATION,
        encoded_frame->dependency == EncodedFrame::KEY ?
            std::min(1.0, attenuated_utilization) : attenuated_utilization);
  }

  SendEncodedFrame(encoder_bitrate, encoded_frame.Pass());
}

}  // namespace cast
}  // namespace media