Revert "Omit calls to set composing region when pasting image."
[chromium-blink-merge.git] / media / cast / test / simulator.cc
blob540f377e3e1d774495d526828dea2e5f21389c14
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Simulate end to end streaming.
//
// Input:
// --source=
//   WebM file used as the source of video and audio frames.
// --output=
//   File path to write out the raw event log of the simulation session.
// --sim-id=
//   Unique simulation ID.
// --target-delay-ms=
//   Target playout delay to configure (integer number of milliseconds).
//   Optional; default is 400.
// --max-frame-rate=
//   The maximum frame rate allowed at any time during the Cast session.
//   Optional; default is 30.
// --source-frame-rate=
//   Overrides the playback rate; the source video will play faster or slower.
// --run-time=
//   In seconds, how long the Cast session runs for.
//   Optional; default is 180.
// --metrics-output=
//   File path to write PSNR and SSIM metrics between source frames and
//   decoded frames. Assumes all encoded frames are decoded.
// --yuv-output=
//   File path to write YUV decoded frames in YUV4MPEG2 format.
// --no-simulation
//   Do not run network simulation.
//
// Output:
// - Raw event log of the simulation session tagged with the unique test ID,
//   written out to the specified file path.
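//
// Example invocation (the binary name and file paths are illustrative; the
// flags are the ones documented above):
//   cast_simulator --source=talk.webm --output=/tmp/sim-events.gz \
//       --sim-id=run1 --target-delay-ms=400 --run-time=60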
37 #include "base/at_exit.h"
38 #include "base/base_paths.h"
39 #include "base/command_line.h"
40 #include "base/files/file_path.h"
41 #include "base/files/file_util.h"
42 #include "base/files/memory_mapped_file.h"
43 #include "base/files/scoped_file.h"
44 #include "base/json/json_writer.h"
45 #include "base/logging.h"
46 #include "base/path_service.h"
47 #include "base/strings/string_number_conversions.h"
48 #include "base/strings/stringprintf.h"
49 #include "base/test/simple_test_tick_clock.h"
50 #include "base/thread_task_runner_handle.h"
51 #include "base/time/tick_clock.h"
52 #include "base/values.h"
53 #include "media/base/audio_bus.h"
54 #include "media/base/media.h"
55 #include "media/base/video_frame.h"
56 #include "media/cast/cast_config.h"
57 #include "media/cast/cast_environment.h"
58 #include "media/cast/cast_receiver.h"
59 #include "media/cast/cast_sender.h"
60 #include "media/cast/logging/encoding_event_subscriber.h"
61 #include "media/cast/logging/log_serializer.h"
62 #include "media/cast/logging/logging_defines.h"
63 #include "media/cast/logging/proto/raw_events.pb.h"
64 #include "media/cast/logging/raw_event_subscriber_bundle.h"
65 #include "media/cast/logging/simple_event_subscriber.h"
66 #include "media/cast/net/cast_transport_config.h"
67 #include "media/cast/net/cast_transport_defines.h"
68 #include "media/cast/net/cast_transport_sender.h"
69 #include "media/cast/net/cast_transport_sender_impl.h"
70 #include "media/cast/test/fake_media_source.h"
71 #include "media/cast/test/fake_single_thread_task_runner.h"
72 #include "media/cast/test/loopback_transport.h"
73 #include "media/cast/test/proto/network_simulation_model.pb.h"
74 #include "media/cast/test/skewed_tick_clock.h"
75 #include "media/cast/test/utility/audio_utility.h"
76 #include "media/cast/test/utility/default_config.h"
77 #include "media/cast/test/utility/test_util.h"
78 #include "media/cast/test/utility/udp_proxy.h"
79 #include "media/cast/test/utility/video_utility.h"
81 using media::cast::proto::IPPModel;
82 using media::cast::proto::NetworkSimulationModel;
83 using media::cast::proto::NetworkSimulationModelType;

namespace media {
namespace cast {
namespace {
const char kLibDir[] = "lib-dir";
const char kModelPath[] = "model";
const char kMetricsOutputPath[] = "metrics-output";
const char kOutputPath[] = "output";
const char kMaxFrameRate[] = "max-frame-rate";
const char kNoSimulation[] = "no-simulation";
const char kRunTime[] = "run-time";
const char kSimulationId[] = "sim-id";
const char kSourcePath[] = "source";
const char kSourceFrameRate[] = "source-frame-rate";
const char kTargetDelay[] = "target-delay-ms";
const char kYuvOutputPath[] = "yuv-output";

int GetIntegerSwitchValue(const char* switch_name, int default_value) {
  const std::string as_str =
      base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(switch_name);
  if (as_str.empty())
    return default_value;
  int as_int;
  CHECK(base::StringToInt(as_str, &as_int));
  CHECK_GT(as_int, 0);
  return as_int;
}

void UpdateCastTransportStatus(CastTransportStatus status) {
  LOG(INFO) << "Cast transport status: " << status;
}

void LogAudioOperationalStatus(OperationalStatus status) {
  LOG(INFO) << "Audio status: " << status;
}

void LogVideoOperationalStatus(OperationalStatus status) {
  LOG(INFO) << "Video status: " << status;
}

void LogTransportEvents(const scoped_refptr<CastEnvironment>& env,
                        const std::vector<PacketEvent>& packet_events,
                        const std::vector<FrameEvent>& frame_events) {
  for (std::vector<media::cast::PacketEvent>::const_iterator it =
           packet_events.begin();
       it != packet_events.end();
       ++it) {
    env->Logging()->InsertPacketEvent(it->timestamp,
                                      it->type,
                                      it->media_type,
                                      it->rtp_timestamp,
                                      it->frame_id,
                                      it->packet_id,
                                      it->max_packet_id,
                                      it->size);
  }
  for (std::vector<media::cast::FrameEvent>::const_iterator it =
           frame_events.begin();
       it != frame_events.end();
       ++it) {
    if (it->type == FRAME_PLAYOUT) {
      env->Logging()->InsertFrameEventWithDelay(
          it->timestamp,
          it->type,
          it->media_type,
          it->rtp_timestamp,
          it->frame_id,
          it->delay_delta);
    } else {
      env->Logging()->InsertFrameEvent(
          it->timestamp,
          it->type,
          it->media_type,
          it->rtp_timestamp,
          it->frame_id);
    }
  }
}

// Maintains a queue of encoded video frames.
// This works by tracking FRAME_CAPTURE_END and FRAME_ENCODED events.
// If a video frame is detected to be encoded it transfers a frame
// from FakeMediaSource to its internal queue. Otherwise it drops a
// frame from FakeMediaSource.
class EncodedVideoFrameTracker : public RawEventSubscriber {
 public:
  EncodedVideoFrameTracker(FakeMediaSource* media_source)
      : media_source_(media_source),
        last_frame_event_type_(UNKNOWN) {}
  ~EncodedVideoFrameTracker() final {}

  // RawEventSubscriber implementations.
  void OnReceiveFrameEvent(const FrameEvent& frame_event) final {
    // This method only cares about video FRAME_CAPTURE_END and
    // FRAME_ENCODED events.
    if (frame_event.media_type != VIDEO_EVENT) {
      return;
    }
    if (frame_event.type != FRAME_CAPTURE_END &&
        frame_event.type != FRAME_ENCODED) {
      return;
    }
    // If there are two consecutive FRAME_CAPTURE_END events that means
    // a frame is dropped.
    if (last_frame_event_type_ == FRAME_CAPTURE_END &&
        frame_event.type == FRAME_CAPTURE_END) {
      media_source_->PopOldestInsertedVideoFrame();
    }
    if (frame_event.type == FRAME_ENCODED) {
      video_frames_.push(media_source_->PopOldestInsertedVideoFrame());
    }
    last_frame_event_type_ = frame_event.type;
  }

  void OnReceivePacketEvent(const PacketEvent& packet_event) final {
    // Don't care.
  }

  scoped_refptr<media::VideoFrame> PopOldestEncodedFrame() {
    CHECK(!video_frames_.empty());
    scoped_refptr<media::VideoFrame> video_frame = video_frames_.front();
    video_frames_.pop();
    return video_frame;
  }

 private:
  FakeMediaSource* media_source_;
  CastLoggingEvent last_frame_event_type_;
  std::queue<scoped_refptr<media::VideoFrame> > video_frames_;

  DISALLOW_COPY_AND_ASSIGN(EncodedVideoFrameTracker);
};

// Appends a YUV frame in I420 format to the file located at |path|.
void AppendYuvToFile(const base::FilePath& path,
                     scoped_refptr<media::VideoFrame> frame) {
  // Write YUV420 format to file.
  std::string header;
  base::StringAppendF(
      &header, "FRAME W%d H%d\n",
      frame->coded_size().width(),
      frame->coded_size().height());
  AppendToFile(path, header.data(), header.size());
  AppendToFile(path,
      reinterpret_cast<char*>(frame->data(media::VideoFrame::kYPlane)),
      frame->stride(media::VideoFrame::kYPlane) *
          frame->rows(media::VideoFrame::kYPlane));
  AppendToFile(path,
      reinterpret_cast<char*>(frame->data(media::VideoFrame::kUPlane)),
      frame->stride(media::VideoFrame::kUPlane) *
          frame->rows(media::VideoFrame::kUPlane));
  AppendToFile(path,
      reinterpret_cast<char*>(frame->data(media::VideoFrame::kVPlane)),
      frame->stride(media::VideoFrame::kVPlane) *
          frame->rows(media::VideoFrame::kVPlane));
}

// A container to save output of GotVideoFrame() for computation based
// on output frames.
struct GotVideoFrameOutput {
  GotVideoFrameOutput() : counter(0) {}
  int counter;
  std::vector<double> psnr;
  std::vector<double> ssim;
};
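
// Note: GotVideoFrame() below re-registers itself with the receiver via
// RequestDecodedVideoFrame(), so each decoded frame triggers a request for
// the next one for as long as the simulation runs.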
void GotVideoFrame(
    GotVideoFrameOutput* metrics_output,
    const base::FilePath& yuv_output,
    EncodedVideoFrameTracker* video_frame_tracker,
    CastReceiver* cast_receiver,
    const scoped_refptr<media::VideoFrame>& video_frame,
    const base::TimeTicks& render_time,
    bool continuous) {
  ++metrics_output->counter;
  cast_receiver->RequestDecodedVideoFrame(
      base::Bind(&GotVideoFrame, metrics_output, yuv_output,
                 video_frame_tracker, cast_receiver));

  // If |video_frame_tracker| is available that means we're computing
  // quality metrics.
  if (video_frame_tracker) {
    scoped_refptr<media::VideoFrame> src_frame =
        video_frame_tracker->PopOldestEncodedFrame();
    metrics_output->psnr.push_back(I420PSNR(src_frame, video_frame));
    metrics_output->ssim.push_back(I420SSIM(src_frame, video_frame));
  }

  if (!yuv_output.empty()) {
    AppendYuvToFile(yuv_output, video_frame);
  }
}

void GotAudioFrame(
    int* counter,
    CastReceiver* cast_receiver,
    scoped_ptr<AudioBus> audio_bus,
    const base::TimeTicks& playout_time,
    bool is_continuous) {
  ++*counter;
  cast_receiver->RequestDecodedAudioFrame(
      base::Bind(&GotAudioFrame, counter, cast_receiver));
}

// Serialize |frame_events| and |packet_events| and append to the file
// located at |output_path|.
void AppendLogToFile(media::cast::proto::LogMetadata* metadata,
                     const media::cast::FrameEventList& frame_events,
                     const media::cast::PacketEventList& packet_events,
                     const base::FilePath& output_path) {
  media::cast::proto::GeneralDescription* gen_desc =
      metadata->mutable_general_description();
  gen_desc->set_product("Cast Simulator");
  gen_desc->set_product_version("0.1");

  scoped_ptr<char[]> serialized_log(new char[media::cast::kMaxSerializedBytes]);
  int output_bytes;
  bool success = media::cast::SerializeEvents(*metadata,
                                              frame_events,
                                              packet_events,
                                              true,
                                              media::cast::kMaxSerializedBytes,
                                              serialized_log.get(),
                                              &output_bytes);

  if (!success) {
    LOG(ERROR) << "Failed to serialize log.";
    return;
  }

  if (!AppendToFile(output_path, serialized_log.get(), output_bytes)) {
    LOG(ERROR) << "Failed to append to log.";
  }
}

// Run simulation once.
//
// |log_output_path| is the path to write serialized log.
// |extra_data| is extra tagging information to write to log.
void RunSimulation(const base::FilePath& source_path,
                   const base::FilePath& log_output_path,
                   const base::FilePath& metrics_output_path,
                   const base::FilePath& yuv_output_path,
                   const std::string& extra_data,
                   const NetworkSimulationModel& model) {
  // Fake clock. Make sure start time is non zero.
  base::SimpleTestTickClock testing_clock;
  testing_clock.Advance(base::TimeDelta::FromSeconds(1));

  // Task runner.
  scoped_refptr<test::FakeSingleThreadTaskRunner> task_runner =
      new test::FakeSingleThreadTaskRunner(&testing_clock);
  base::ThreadTaskRunnerHandle task_runner_handle(task_runner);

  // CastEnvironments.
  scoped_refptr<CastEnvironment> sender_env =
      new CastEnvironment(
          scoped_ptr<base::TickClock>(
              new test::SkewedTickClock(&testing_clock)).Pass(),
          task_runner,
          task_runner,
          task_runner);
  scoped_refptr<CastEnvironment> receiver_env =
      new CastEnvironment(
          scoped_ptr<base::TickClock>(
              new test::SkewedTickClock(&testing_clock)).Pass(),
          task_runner,
          task_runner,
          task_runner);

  // Event subscriber. Store at most 1 hour of events.
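  // (The capacities below presumably correspond to roughly 100 audio events
  // and 30 video frames per second, kept for 60 * 60 seconds.)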
  EncodingEventSubscriber audio_event_subscriber(AUDIO_EVENT,
                                                 100 * 60 * 60);
  EncodingEventSubscriber video_event_subscriber(VIDEO_EVENT,
                                                 30 * 60 * 60);
  sender_env->Logging()->AddRawEventSubscriber(&audio_event_subscriber);
  sender_env->Logging()->AddRawEventSubscriber(&video_event_subscriber);

  // Audio sender config.
  AudioSenderConfig audio_sender_config = GetDefaultAudioSenderConfig();
  audio_sender_config.min_playout_delay =
      audio_sender_config.max_playout_delay =
          base::TimeDelta::FromMilliseconds(
              GetIntegerSwitchValue(kTargetDelay, 400));

  // Audio receiver config.
  FrameReceiverConfig audio_receiver_config =
      GetDefaultAudioReceiverConfig();
  audio_receiver_config.rtp_max_delay_ms =
      audio_sender_config.max_playout_delay.InMilliseconds();

  // Video sender config.
  VideoSenderConfig video_sender_config = GetDefaultVideoSenderConfig();
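  // The bitrate fields below are in bits per second: a 2.5 Mbps ceiling with
  // a 2 Mbps floor and starting rate.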
  video_sender_config.max_bitrate = 2500000;
  video_sender_config.min_bitrate = 2000000;
  video_sender_config.start_bitrate = 2000000;
  video_sender_config.min_playout_delay =
      video_sender_config.max_playout_delay =
          audio_sender_config.max_playout_delay;
  video_sender_config.max_frame_rate = GetIntegerSwitchValue(kMaxFrameRate, 30);

  // Video receiver config.
  FrameReceiverConfig video_receiver_config =
      GetDefaultVideoReceiverConfig();
  video_receiver_config.rtp_max_delay_ms =
      video_sender_config.max_playout_delay.InMilliseconds();

  // Loopback transport.
  LoopBackTransport receiver_to_sender(receiver_env);
  LoopBackTransport sender_to_receiver(sender_env);

  struct PacketProxy {
    PacketProxy() : receiver(NULL) {}
    void ReceivePacket(scoped_ptr<Packet> packet) {
      if (receiver)
        receiver->ReceivePacket(packet.Pass());
    }
    CastReceiver* receiver;
  };

  PacketProxy packet_proxy;

  // Cast receiver.
  scoped_ptr<CastTransportSender> transport_receiver(
      new CastTransportSenderImpl(
          NULL,
          &testing_clock,
          net::IPEndPoint(),
          net::IPEndPoint(),
          make_scoped_ptr(new base::DictionaryValue),
          base::Bind(&UpdateCastTransportStatus),
          base::Bind(&LogTransportEvents, receiver_env),
          base::TimeDelta::FromSeconds(1),
          task_runner,
          base::Bind(&PacketProxy::ReceivePacket,
                     base::Unretained(&packet_proxy)),
          &receiver_to_sender));
  scoped_ptr<CastReceiver> cast_receiver(
      CastReceiver::Create(receiver_env,
                           audio_receiver_config,
                           video_receiver_config,
                           transport_receiver.get()));

  packet_proxy.receiver = cast_receiver.get();

  // Cast sender and transport sender.
  scoped_ptr<CastTransportSender> transport_sender(
      new CastTransportSenderImpl(
          NULL,
          &testing_clock,
          net::IPEndPoint(),
          net::IPEndPoint(),
          make_scoped_ptr(new base::DictionaryValue),
          base::Bind(&UpdateCastTransportStatus),
          base::Bind(&LogTransportEvents, sender_env),
          base::TimeDelta::FromSeconds(1),
          task_runner,
          PacketReceiverCallback(),
          &sender_to_receiver));
  scoped_ptr<CastSender> cast_sender(
      CastSender::Create(sender_env, transport_sender.get()));

  // Initialize network simulation model.
  const bool use_network_simulation =
      model.type() == media::cast::proto::INTERRUPTED_POISSON_PROCESS;
  scoped_ptr<test::InterruptedPoissonProcess> ipp;
  if (use_network_simulation) {
    LOG(INFO) << "Running Poisson based network simulation.";
    const IPPModel& ipp_model = model.ipp();
    std::vector<double> average_rates(ipp_model.average_rate_size());
    std::copy(ipp_model.average_rate().begin(),
              ipp_model.average_rate().end(),
              average_rates.begin());
    ipp.reset(new test::InterruptedPoissonProcess(
        average_rates,
        ipp_model.coef_burstiness(), ipp_model.coef_variance(), 0));
    receiver_to_sender.Initialize(
        ipp->NewBuffer(128 * 1024).Pass(),
        transport_sender->PacketReceiverForTesting(),
        task_runner, &testing_clock);
    sender_to_receiver.Initialize(
        ipp->NewBuffer(128 * 1024).Pass(),
        transport_receiver->PacketReceiverForTesting(), task_runner,
        &testing_clock);
  } else {
    LOG(INFO) << "No network simulation.";
    receiver_to_sender.Initialize(
        scoped_ptr<test::PacketPipe>(),
        transport_sender->PacketReceiverForTesting(),
        task_runner, &testing_clock);
    sender_to_receiver.Initialize(
        scoped_ptr<test::PacketPipe>(),
        transport_receiver->PacketReceiverForTesting(), task_runner,
        &testing_clock);
  }

  // Initialize a fake media source and a tracker of encoded video frames.
  const bool quality_test = !metrics_output_path.empty();
  FakeMediaSource media_source(task_runner,
                               &testing_clock,
                               audio_sender_config,
                               video_sender_config,
                               quality_test);
  scoped_ptr<EncodedVideoFrameTracker> video_frame_tracker;
  if (quality_test) {
    video_frame_tracker.reset(new EncodedVideoFrameTracker(&media_source));
    sender_env->Logging()->AddRawEventSubscriber(video_frame_tracker.get());
  }

  // Quality metrics computed for each frame decoded.
  GotVideoFrameOutput metrics_output;

  // Start receiver.
  int audio_frame_count = 0;
  cast_receiver->RequestDecodedVideoFrame(
      base::Bind(&GotVideoFrame, &metrics_output, yuv_output_path,
                 video_frame_tracker.get(), cast_receiver.get()));
  cast_receiver->RequestDecodedAudioFrame(
      base::Bind(&GotAudioFrame, &audio_frame_count, cast_receiver.get()));

  // Initialize audio and video senders.
  cast_sender->InitializeAudio(audio_sender_config,
                               base::Bind(&LogAudioOperationalStatus));
  cast_sender->InitializeVideo(media_source.get_video_config(),
                               base::Bind(&LogVideoOperationalStatus),
                               CreateDefaultVideoEncodeAcceleratorCallback(),
                               CreateDefaultVideoEncodeMemoryCallback());
  task_runner->RunTasks();

  // Truncate the YUV file to prepare for writing.
  if (!yuv_output_path.empty()) {
    base::ScopedFILE file(base::OpenFile(yuv_output_path, "wb"));
    if (!file.get()) {
      LOG(ERROR) << "Cannot save YUV output to file.";
      return;
    }
    LOG(INFO) << "Writing YUV output to file: " << yuv_output_path.value();

    // Write YUV4MPEG2 header.
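    // In the header, W/H are the frame dimensions, F the frame rate as a
    // ratio (30000:1001, i.e. ~29.97 fps), Ip marks progressive frames, A the
    // pixel aspect ratio and C420 the chroma subsampling. Note the values are
    // hard-coded here rather than derived from the source video.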
    const std::string header("YUV4MPEG2 W1280 H720 F30000:1001 Ip A1:1 C420\n");
    AppendToFile(yuv_output_path, header.data(), header.size());
  }

  // Start sending.
  if (!source_path.empty()) {
    // 0 means using the FPS from the file.
    media_source.SetSourceFile(source_path,
                               GetIntegerSwitchValue(kSourceFrameRate, 0));
  }
  media_source.Start(cast_sender->audio_frame_input(),
                     cast_sender->video_frame_input());

  // By default the simulation runs for 3 minutes, or for the duration given
  // via the --run-time= flag.
  base::TimeDelta elapsed_time;
  const base::TimeDelta desired_run_time =
      base::TimeDelta::FromSeconds(GetIntegerSwitchValue(kRunTime, 180));
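  // task_runner->Sleep() advances the fake testing clock and runs any tasks
  // that become due, so simulated time passes without any real-time waiting.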
  while (elapsed_time < desired_run_time) {
    // Each step is 100us.
    base::TimeDelta step = base::TimeDelta::FromMicroseconds(100);
    task_runner->Sleep(step);
    elapsed_time += step;
  }

  // Get event logs for audio and video.
  media::cast::proto::LogMetadata audio_metadata, video_metadata;
  media::cast::FrameEventList audio_frame_events, video_frame_events;
  media::cast::PacketEventList audio_packet_events, video_packet_events;
  audio_metadata.set_extra_data(extra_data);
  video_metadata.set_extra_data(extra_data);
  audio_event_subscriber.GetEventsAndReset(
      &audio_metadata, &audio_frame_events, &audio_packet_events);
  video_event_subscriber.GetEventsAndReset(
      &video_metadata, &video_frame_events, &video_packet_events);

  // Print simulation results.

  // Compute and print statistics for video:
  //
  // * Total video frames captured.
  // * Total video frames encoded.
  // * Total video frames dropped.
  // * Total video frames received late.
  // * Average target bitrate.
  // * Average encoded bitrate.
  int total_video_frames = 0;
  int encoded_video_frames = 0;
  int dropped_video_frames = 0;
  int late_video_frames = 0;
  int64 total_delay_of_late_frames_ms = 0;
  int64 encoded_size = 0;
  int64 target_bitrate = 0;
  for (size_t i = 0; i < video_frame_events.size(); ++i) {
    const media::cast::proto::AggregatedFrameEvent& event =
        *video_frame_events[i];
    ++total_video_frames;
    if (event.has_encoded_frame_size()) {
      ++encoded_video_frames;
      encoded_size += event.encoded_frame_size();
      target_bitrate += event.target_bitrate();
    } else {
      ++dropped_video_frames;
    }
    if (event.has_delay_millis() && event.delay_millis() < 0) {
      ++late_video_frames;
      total_delay_of_late_frames_ms += -event.delay_millis();
    }
  }

  // Subtract fraction of dropped frames from |elapsed_time| before estimating
  // the average encoded bitrate.
  const base::TimeDelta elapsed_time_undropped =
      total_video_frames <= 0 ? base::TimeDelta() :
      (elapsed_time * (total_video_frames - dropped_video_frames) /
       total_video_frames);
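  // Encoded bitrate in kbps: 8 bits per byte, divided by the elapsed time in
  // seconds, divided by 1000.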
  const double avg_encoded_bitrate =
      elapsed_time_undropped <= base::TimeDelta() ? 0 :
      8.0 * encoded_size / elapsed_time_undropped.InSecondsF() / 1000;
  double avg_target_bitrate =
      !encoded_video_frames ? 0 : target_bitrate / encoded_video_frames / 1000;

  LOG(INFO) << "Configured target playout delay (ms): "
            << video_receiver_config.rtp_max_delay_ms;
  LOG(INFO) << "Audio frame count: " << audio_frame_count;
  LOG(INFO) << "Inserted video frames: " << total_video_frames;
  LOG(INFO) << "Decoded video frames: " << metrics_output.counter;
  LOG(INFO) << "Dropped video frames: " << dropped_video_frames;
  LOG(INFO) << "Late video frames: " << late_video_frames
            << " (average lateness: "
            << (late_video_frames > 0 ?
                    static_cast<double>(total_delay_of_late_frames_ms) /
                        late_video_frames :
                    0)
            << " ms)";
  LOG(INFO) << "Average encoded bitrate (kbps): " << avg_encoded_bitrate;
  LOG(INFO) << "Average target bitrate (kbps): " << avg_target_bitrate;
  LOG(INFO) << "Writing log: " << log_output_path.value();

  // Truncate file and then write serialized log.
  {
    base::ScopedFILE file(base::OpenFile(log_output_path, "wb"));
    if (!file.get()) {
      LOG(INFO) << "Cannot write to log.";
      return;
    }
  }
  AppendLogToFile(&video_metadata, video_frame_events, video_packet_events,
                  log_output_path);
  AppendLogToFile(&audio_metadata, audio_frame_events, audio_packet_events,
                  log_output_path);

  // Write quality metrics.
  if (quality_test) {
    LOG(INFO) << "Writing quality metrics: " << metrics_output_path.value();
    std::string line;
    for (size_t i = 0; i < metrics_output.psnr.size() &&
                       i < metrics_output.ssim.size(); ++i) {
      base::StringAppendF(&line, "%f %f\n", metrics_output.psnr[i],
                          metrics_output.ssim[i]);
    }
    WriteFile(metrics_output_path, line.data(), line.length());
  }
}

NetworkSimulationModel DefaultModel() {
  NetworkSimulationModel model;
  model.set_type(cast::proto::INTERRUPTED_POISSON_PROCESS);
  IPPModel* ipp = model.mutable_ipp();
  ipp->set_coef_burstiness(0.609);
  ipp->set_coef_variance(4.1);

  ipp->add_average_rate(0.609);
  ipp->add_average_rate(0.495);
  ipp->add_average_rate(0.561);
  ipp->add_average_rate(0.458);
  ipp->add_average_rate(0.538);
  ipp->add_average_rate(0.513);
  ipp->add_average_rate(0.585);
  ipp->add_average_rate(0.592);
  ipp->add_average_rate(0.658);
  ipp->add_average_rate(0.556);
  ipp->add_average_rate(0.371);
  ipp->add_average_rate(0.595);
  ipp->add_average_rate(0.490);
  ipp->add_average_rate(0.980);
  ipp->add_average_rate(0.781);
  ipp->add_average_rate(0.463);

  return model;
}

bool IsModelValid(const NetworkSimulationModel& model) {
  if (!model.has_type())
    return false;
  NetworkSimulationModelType type = model.type();
  if (type == media::cast::proto::INTERRUPTED_POISSON_PROCESS) {
    if (!model.has_ipp())
      return false;
    const IPPModel& ipp = model.ipp();
    if (ipp.coef_burstiness() <= 0.0 || ipp.coef_variance() <= 0.0)
      return false;
    if (ipp.average_rate_size() == 0)
      return false;
    for (int i = 0; i < ipp.average_rate_size(); i++) {
      if (ipp.average_rate(i) <= 0.0)
        return false;
    }
  }

  return true;
}

NetworkSimulationModel LoadModel(const base::FilePath& model_path) {
  if (base::CommandLine::ForCurrentProcess()->HasSwitch(kNoSimulation)) {
    NetworkSimulationModel model;
    model.set_type(media::cast::proto::NO_SIMULATION);
    return model;
  }
  if (model_path.empty()) {
    LOG(ERROR) << "Model path not set; Using default model.";
    return DefaultModel();
  }
  std::string model_str;
  if (!base::ReadFileToString(model_path, &model_str)) {
    LOG(ERROR) << "Failed to read model file.";
    return DefaultModel();
  }

  NetworkSimulationModel model;
  if (!model.ParseFromString(model_str)) {
    LOG(ERROR) << "Failed to parse model.";
    return DefaultModel();
  }
  if (!IsModelValid(model)) {
    LOG(ERROR) << "Invalid model.";
    return DefaultModel();
  }

  return model;
}

}  // namespace
}  // namespace cast
}  // namespace media

int main(int argc, char** argv) {
  base::AtExitManager at_exit;
  base::CommandLine::Init(argc, argv);
  InitLogging(logging::LoggingSettings());

  const base::CommandLine* cmd = base::CommandLine::ForCurrentProcess();
  base::FilePath media_path = cmd->GetSwitchValuePath(media::cast::kLibDir);
  if (media_path.empty()) {
    if (!PathService::Get(base::DIR_MODULE, &media_path)) {
      LOG(ERROR) << "Failed to load FFmpeg.";
      return 1;
    }
  }

  media::InitializeMediaLibrary();

  base::FilePath source_path = cmd->GetSwitchValuePath(
      media::cast::kSourcePath);
  base::FilePath log_output_path = cmd->GetSwitchValuePath(
      media::cast::kOutputPath);
  if (log_output_path.empty()) {
    base::GetTempDir(&log_output_path);
    log_output_path = log_output_path.AppendASCII("sim-events.gz");
  }
  base::FilePath metrics_output_path = cmd->GetSwitchValuePath(
      media::cast::kMetricsOutputPath);
  base::FilePath yuv_output_path = cmd->GetSwitchValuePath(
      media::cast::kYuvOutputPath);
  std::string sim_id = cmd->GetSwitchValueASCII(media::cast::kSimulationId);

  NetworkSimulationModel model = media::cast::LoadModel(
      cmd->GetSwitchValuePath(media::cast::kModelPath));

  base::DictionaryValue values;
  values.SetBoolean("sim", true);
  values.SetString("sim-id", sim_id);

  std::string extra_data;
  base::JSONWriter::Write(values, &extra_data);

  // Run.
  media::cast::RunSimulation(source_path, log_output_path, metrics_output_path,
                             yuv_output_path, extra_data, model);
  return 0;
}