// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/ffmpeg/ffmpeg_common.h"

#include "base/basictypes.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"

namespace media {

// Why FF_INPUT_BUFFER_PADDING_SIZE? FFmpeg assumes all input buffers are
// padded. Check here to ensure FFmpeg only receives data padded to its
// specifications.
COMPILE_ASSERT(DecoderBuffer::kPaddingSize >= FF_INPUT_BUFFER_PADDING_SIZE,
               decoder_buffer_padding_size_does_not_fit_ffmpeg_requirement);

// Alignment requirement by FFmpeg for input and output buffers. This needs to
// be updated to match FFmpeg when it changes.
#if defined(ARCH_CPU_ARM_FAMILY)
static const int kFFmpegBufferAddressAlignment = 16;
#else
static const int kFFmpegBufferAddressAlignment = 32;
#endif

// Check here to ensure FFmpeg only receives data aligned to its specifications.
COMPILE_ASSERT(
    DecoderBuffer::kAlignmentSize >= kFFmpegBufferAddressAlignment &&
    DecoderBuffer::kAlignmentSize % kFFmpegBufferAddressAlignment == 0,
    decoder_buffer_alignment_size_does_not_fit_ffmpeg_requirement);

// Allows faster SIMD YUV conversion. Also, FFmpeg occasionally overreads and
// overwrites past the end of the buffer. See video_get_buffer() in
// libavcodec/utils.c.
static const int kFFmpegOutputBufferPaddingSize = 16;

COMPILE_ASSERT(VideoFrame::kFrameSizePadding >= kFFmpegOutputBufferPaddingSize,
               video_frame_padding_size_does_not_fit_ffmpeg_requirement);

COMPILE_ASSERT(
    VideoFrame::kFrameAddressAlignment >= kFFmpegBufferAddressAlignment &&
    VideoFrame::kFrameAddressAlignment % kFFmpegBufferAddressAlignment == 0,
    video_frame_address_alignment_does_not_fit_ffmpeg_requirement);

static const AVRational kMicrosBase = { 1, base::Time::kMicrosecondsPerSecond };

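// Converts |timestamp|, expressed in |time_base| units, into microseconds via
// av_rescale_q() and wraps the result in a base::TimeDelta. For example, a
// timestamp of 90000 in a {1, 90000} (90 kHz) time base converts to one
// second.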
base::TimeDelta ConvertFromTimeBase(const AVRational& time_base,
                                    int64 timestamp) {
  int64 microseconds = av_rescale_q(timestamp, time_base, kMicrosBase);
  return base::TimeDelta::FromMicroseconds(microseconds);
}

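// Rescales |timestamp| from microseconds back into |time_base| units; the
// inverse of ConvertFromTimeBase().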
int64 ConvertToTimeBase(const AVRational& time_base,
                        const base::TimeDelta& timestamp) {
  return av_rescale_q(timestamp.InMicroseconds(), kMicrosBase, time_base);
}

// Converts an FFmpeg audio codec ID into its corresponding supported codec id.
static AudioCodec CodecIDToAudioCodec(AVCodecID codec_id) {
  switch (codec_id) {
    case AV_CODEC_ID_AAC:
      return kCodecAAC;
    case AV_CODEC_ID_MP3:
      return kCodecMP3;
    case AV_CODEC_ID_VORBIS:
      return kCodecVorbis;
    case AV_CODEC_ID_PCM_U8:
    case AV_CODEC_ID_PCM_S16LE:
    case AV_CODEC_ID_PCM_S24LE:
    case AV_CODEC_ID_PCM_F32LE:
      return kCodecPCM;
    case AV_CODEC_ID_PCM_S16BE:
      return kCodecPCM_S16BE;
    case AV_CODEC_ID_PCM_S24BE:
      return kCodecPCM_S24BE;
    case AV_CODEC_ID_FLAC:
      return kCodecFLAC;
    case AV_CODEC_ID_AMR_NB:
      return kCodecAMR_NB;
    case AV_CODEC_ID_AMR_WB:
      return kCodecAMR_WB;
    case AV_CODEC_ID_GSM_MS:
      return kCodecGSM_MS;
    case AV_CODEC_ID_PCM_ALAW:
      return kCodecPCM_ALAW;
    case AV_CODEC_ID_PCM_MULAW:
      return kCodecPCM_MULAW;
    case AV_CODEC_ID_OPUS:
      return kCodecOpus;
    default:
      DVLOG(1) << "Unknown audio CodecID: " << codec_id;
  }
  return kUnknownAudioCodec;
}

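// Converts a media AudioCodec into its FFmpeg codec ID. |sample_format|
// selects among the little-endian PCM variants for kCodecPCM.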
static AVCodecID AudioCodecToCodecID(AudioCodec audio_codec,
                                     SampleFormat sample_format) {
  switch (audio_codec) {
    case kCodecAAC:
      return AV_CODEC_ID_AAC;
    case kCodecMP3:
      return AV_CODEC_ID_MP3;
    case kCodecPCM:
      switch (sample_format) {
        case kSampleFormatU8:
          return AV_CODEC_ID_PCM_U8;
        case kSampleFormatS16:
          return AV_CODEC_ID_PCM_S16LE;
        case kSampleFormatS32:
          return AV_CODEC_ID_PCM_S24LE;
        case kSampleFormatF32:
          return AV_CODEC_ID_PCM_F32LE;
        default:
          DVLOG(1) << "Unsupported sample format: " << sample_format;
      }
      break;
    case kCodecPCM_S16BE:
      return AV_CODEC_ID_PCM_S16BE;
    case kCodecPCM_S24BE:
      return AV_CODEC_ID_PCM_S24BE;
    case kCodecVorbis:
      return AV_CODEC_ID_VORBIS;
    case kCodecFLAC:
      return AV_CODEC_ID_FLAC;
    case kCodecAMR_NB:
      return AV_CODEC_ID_AMR_NB;
    case kCodecAMR_WB:
      return AV_CODEC_ID_AMR_WB;
    case kCodecGSM_MS:
      return AV_CODEC_ID_GSM_MS;
    case kCodecPCM_ALAW:
      return AV_CODEC_ID_PCM_ALAW;
    case kCodecPCM_MULAW:
      return AV_CODEC_ID_PCM_MULAW;
    case kCodecOpus:
      return AV_CODEC_ID_OPUS;
    default:
      DVLOG(1) << "Unknown AudioCodec: " << audio_codec;
  }
  return AV_CODEC_ID_NONE;
}

// Converts an FFmpeg video codec ID into its corresponding supported codec id.
static VideoCodec CodecIDToVideoCodec(AVCodecID codec_id) {
  switch (codec_id) {
    case AV_CODEC_ID_H264:
      return kCodecH264;
    case AV_CODEC_ID_THEORA:
      return kCodecTheora;
    case AV_CODEC_ID_MPEG4:
      return kCodecMPEG4;
    case AV_CODEC_ID_VP8:
      return kCodecVP8;
    case AV_CODEC_ID_VP9:
      return kCodecVP9;
    default:
      DVLOG(1) << "Unknown video CodecID: " << codec_id;
  }
  return kUnknownVideoCodec;
}

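// Converts a media VideoCodec into its FFmpeg codec ID.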
static AVCodecID VideoCodecToCodecID(VideoCodec video_codec) {
  switch (video_codec) {
    case kCodecH264:
      return AV_CODEC_ID_H264;
    case kCodecTheora:
      return AV_CODEC_ID_THEORA;
    case kCodecMPEG4:
      return AV_CODEC_ID_MPEG4;
    case kCodecVP8:
      return AV_CODEC_ID_VP8;
    case kCodecVP9:
      return AV_CODEC_ID_VP9;
    default:
      DVLOG(1) << "Unknown VideoCodec: " << video_codec;
  }
  return AV_CODEC_ID_NONE;
}

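// Converts an FFmpeg FF_PROFILE_H264_* value into its corresponding
// VideoCodecProfile.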
static VideoCodecProfile ProfileIDToVideoCodecProfile(int profile) {
  // Clear out the CONSTRAINED & INTRA flags which are strict subsets of the
  // corresponding profiles with which they're used.
  profile &= ~FF_PROFILE_H264_CONSTRAINED;
  profile &= ~FF_PROFILE_H264_INTRA;
  switch (profile) {
    case FF_PROFILE_H264_BASELINE:
      return H264PROFILE_BASELINE;
    case FF_PROFILE_H264_MAIN:
      return H264PROFILE_MAIN;
    case FF_PROFILE_H264_EXTENDED:
      return H264PROFILE_EXTENDED;
    case FF_PROFILE_H264_HIGH:
      return H264PROFILE_HIGH;
    case FF_PROFILE_H264_HIGH_10:
      return H264PROFILE_HIGH10PROFILE;
    case FF_PROFILE_H264_HIGH_422:
      return H264PROFILE_HIGH422PROFILE;
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
      return H264PROFILE_HIGH444PREDICTIVEPROFILE;
    default:
      DVLOG(1) << "Unknown profile id: " << profile;
  }
  return VIDEO_CODEC_PROFILE_UNKNOWN;
}

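// Converts a VideoCodecProfile back into the FFmpeg FF_PROFILE_H264_* value.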
static int VideoCodecProfileToProfileID(VideoCodecProfile profile) {
  switch (profile) {
    case H264PROFILE_BASELINE:
      return FF_PROFILE_H264_BASELINE;
    case H264PROFILE_MAIN:
      return FF_PROFILE_H264_MAIN;
    case H264PROFILE_EXTENDED:
      return FF_PROFILE_H264_EXTENDED;
    case H264PROFILE_HIGH:
      return FF_PROFILE_H264_HIGH;
    case H264PROFILE_HIGH10PROFILE:
      return FF_PROFILE_H264_HIGH_10;
    case H264PROFILE_HIGH422PROFILE:
      return FF_PROFILE_H264_HIGH_422;
    case H264PROFILE_HIGH444PREDICTIVEPROFILE:
      return FF_PROFILE_H264_HIGH_444_PREDICTIVE;
    default:
      DVLOG(1) << "Unknown VideoCodecProfile: " << profile;
  }
  return FF_PROFILE_UNKNOWN;
}

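// Converts an FFmpeg AVSampleFormat into its media SampleFormat equivalent.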
SampleFormat AVSampleFormatToSampleFormat(AVSampleFormat sample_format) {
  switch (sample_format) {
    case AV_SAMPLE_FMT_U8:
      return kSampleFormatU8;
    case AV_SAMPLE_FMT_S16:
      return kSampleFormatS16;
    case AV_SAMPLE_FMT_S32:
      return kSampleFormatS32;
    case AV_SAMPLE_FMT_FLT:
      return kSampleFormatF32;
    case AV_SAMPLE_FMT_S16P:
      return kSampleFormatPlanarS16;
    case AV_SAMPLE_FMT_FLTP:
      return kSampleFormatPlanarF32;
    default:
      DVLOG(1) << "Unknown AVSampleFormat: " << sample_format;
  }
  return kUnknownSampleFormat;
}

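// Converts a media SampleFormat into its FFmpeg AVSampleFormat equivalent.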
static AVSampleFormat SampleFormatToAVSampleFormat(SampleFormat sample_format) {
  switch (sample_format) {
    case kSampleFormatU8:
      return AV_SAMPLE_FMT_U8;
    case kSampleFormatS16:
      return AV_SAMPLE_FMT_S16;
    case kSampleFormatS32:
      return AV_SAMPLE_FMT_S32;
    case kSampleFormatF32:
      return AV_SAMPLE_FMT_FLT;
    case kSampleFormatPlanarS16:
      return AV_SAMPLE_FMT_S16P;
    case kSampleFormatPlanarF32:
      return AV_SAMPLE_FMT_FLTP;
    default:
      DVLOG(1) << "Unknown SampleFormat: " << sample_format;
  }
  return AV_SAMPLE_FMT_NONE;
}

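// Fills |config| from the settings in |codec_context|. |is_encrypted| and
// |record_stats| are forwarded to AudioDecoderConfig::Initialize().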
void AVCodecContextToAudioDecoderConfig(
    const AVCodecContext* codec_context,
    bool is_encrypted,
    AudioDecoderConfig* config,
    bool record_stats) {
  DCHECK_EQ(codec_context->codec_type, AVMEDIA_TYPE_AUDIO);

  AudioCodec codec = CodecIDToAudioCodec(codec_context->codec_id);

  SampleFormat sample_format =
      AVSampleFormatToSampleFormat(codec_context->sample_fmt);

  ChannelLayout channel_layout = ChannelLayoutToChromeChannelLayout(
      codec_context->channel_layout, codec_context->channels);

  int sample_rate = codec_context->sample_rate;
  if (codec == kCodecOpus) {
    // |codec_context->sample_fmt| is not set by FFmpeg because Opus decoding
    // is not enabled in FFmpeg. It doesn't matter what value is set here, so
    // long as it's valid; the true sample format is selected inside the
    // decoder.
    sample_format = kSampleFormatF32;

    // Always use 48kHz for OPUS. Technically we should match to the highest
    // supported hardware sample rate among [8, 12, 16, 24, 48] kHz, but we
    // don't know the hardware sample rate at this point and those rates are
    // rarely used for output. See the "Input Sample Rate" section of the spec:
    // http://tools.ietf.org/html/draft-terriberry-oggopus-01#page-11
    sample_rate = 48000;
  }

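  // |codec_context->seek_preroll| is measured in samples; convert it to a
  // duration using the stream's sample rate.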
  base::TimeDelta seek_preroll;
  if (codec_context->seek_preroll > 0) {
    seek_preroll = base::TimeDelta::FromMicroseconds(
        codec_context->seek_preroll * 1000000.0 / codec_context->sample_rate);
  }

  config->Initialize(codec,
                     sample_format,
                     channel_layout,
                     sample_rate,
                     codec_context->extradata,
                     codec_context->extradata_size,
                     is_encrypted,
                     record_stats,
                     seek_preroll,
                     codec_context->delay);
  if (codec != kCodecOpus) {
    DCHECK_EQ(av_get_bytes_per_sample(codec_context->sample_fmt) * 8,
              config->bits_per_channel());
  }
}

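// Fills |config| from |stream|, treating the stream as encrypted when an
// "enc_key_id" entry is present in its metadata.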
void AVStreamToAudioDecoderConfig(
    const AVStream* stream,
    AudioDecoderConfig* config,
    bool record_stats) {
  bool is_encrypted = false;
  AVDictionaryEntry* key = av_dict_get(stream->metadata, "enc_key_id", NULL, 0);
  if (key)
    is_encrypted = true;
  return AVCodecContextToAudioDecoderConfig(
      stream->codec, is_encrypted, config, record_stats);
}

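// Fills |codec_context| with the audio settings from |config|, copying any
// extra data into an FFmpeg-allocated, padded buffer.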
void AudioDecoderConfigToAVCodecContext(const AudioDecoderConfig& config,
                                        AVCodecContext* codec_context) {
  codec_context->codec_type = AVMEDIA_TYPE_AUDIO;
  codec_context->codec_id = AudioCodecToCodecID(config.codec(),
                                                config.sample_format());
  codec_context->sample_fmt = SampleFormatToAVSampleFormat(
      config.sample_format());

  // TODO(scherkus): should we set |channel_layout|? I'm not sure if FFmpeg uses
  // said information to decode.
  codec_context->channels =
      ChannelLayoutToChannelCount(config.channel_layout());
  codec_context->sample_rate = config.samples_per_second();

  if (config.extra_data()) {
    codec_context->extradata_size = config.extra_data_size();
    codec_context->extradata = reinterpret_cast<uint8_t*>(
        av_malloc(config.extra_data_size() + FF_INPUT_BUFFER_PADDING_SIZE));
    memcpy(codec_context->extradata, config.extra_data(),
           config.extra_data_size());
    memset(codec_context->extradata + config.extra_data_size(), '\0',
           FF_INPUT_BUFFER_PADDING_SIZE);
  } else {
    codec_context->extradata = NULL;
    codec_context->extradata_size = 0;
  }
}

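// Fills |config| from |stream|, deriving the coded size, visible rect, natural
// size, profile and pixel format from the stream's codec context and metadata.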
void AVStreamToVideoDecoderConfig(
    const AVStream* stream,
    VideoDecoderConfig* config,
    bool record_stats) {
  gfx::Size coded_size(stream->codec->coded_width, stream->codec->coded_height);

  // TODO(vrk): This assumes decoded frame data starts at (0, 0), which is true
  // for now, but may not always be true forever. Fix this in the future.
  gfx::Rect visible_rect(stream->codec->width, stream->codec->height);

  AVRational aspect_ratio = { 1, 1 };
  if (stream->sample_aspect_ratio.num)
    aspect_ratio = stream->sample_aspect_ratio;
  else if (stream->codec->sample_aspect_ratio.num)
    aspect_ratio = stream->codec->sample_aspect_ratio;

  VideoCodec codec = CodecIDToVideoCodec(stream->codec->codec_id);

  VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
  if (codec == kCodecVP8)
    profile = VP8PROFILE_ANY;
  else if (codec == kCodecVP9)
    profile = VP9PROFILE_ANY;
  else
    profile = ProfileIDToVideoCodecProfile(stream->codec->profile);

  gfx::Size natural_size = GetNaturalSize(
      visible_rect.size(), aspect_ratio.num, aspect_ratio.den);

  if (record_stats) {
    // Note the PRESUBMIT_IGNORE_UMA_MAX below; it silences the PRESUBMIT.py
    // check for uma enum max usage, since we're abusing
    // UMA_HISTOGRAM_ENUMERATION to report a discrete value.
    UMA_HISTOGRAM_ENUMERATION("Media.VideoColorRange",
                              stream->codec->color_range,
                              AVCOL_RANGE_NB);  // PRESUBMIT_IGNORE_UMA_MAX
  }

  VideoFrame::Format format = PixelFormatToVideoFormat(stream->codec->pix_fmt);
  if (codec == kCodecVP9) {
    // TODO(tomfinegan): libavcodec doesn't know about VP9.
    format = VideoFrame::YV12;
    coded_size = visible_rect.size();
  }

  // Pad out |coded_size| for subsampled YUV formats.
  if (format != VideoFrame::YV24) {
    coded_size.set_width((coded_size.width() + 1) / 2 * 2);
    if (format != VideoFrame::YV16)
      coded_size.set_height((coded_size.height() + 1) / 2 * 2);
  }

  bool is_encrypted = false;
  AVDictionaryEntry* key = av_dict_get(stream->metadata, "enc_key_id", NULL, 0);
  if (key)
    is_encrypted = true;

  AVDictionaryEntry* webm_alpha =
      av_dict_get(stream->metadata, "alpha_mode", NULL, 0);
  if (webm_alpha && !strcmp(webm_alpha->value, "1")) {
    format = VideoFrame::YV12A;
  }

  config->Initialize(codec,
                     profile,
                     format,
                     coded_size, visible_rect, natural_size,
                     stream->codec->extradata, stream->codec->extradata_size,
                     is_encrypted,
                     record_stats);
}

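// Fills |codec_context| with the video settings from |config|, copying any
// extra data into an FFmpeg-allocated, padded buffer.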
void VideoDecoderConfigToAVCodecContext(
    const VideoDecoderConfig& config,
    AVCodecContext* codec_context) {
  codec_context->codec_type = AVMEDIA_TYPE_VIDEO;
  codec_context->codec_id = VideoCodecToCodecID(config.codec());
  codec_context->profile = VideoCodecProfileToProfileID(config.profile());
  codec_context->coded_width = config.coded_size().width();
  codec_context->coded_height = config.coded_size().height();
  codec_context->pix_fmt = VideoFormatToPixelFormat(config.format());

  if (config.extra_data()) {
    codec_context->extradata_size = config.extra_data_size();
    codec_context->extradata = reinterpret_cast<uint8_t*>(
        av_malloc(config.extra_data_size() + FF_INPUT_BUFFER_PADDING_SIZE));
    memcpy(codec_context->extradata, config.extra_data(),
           config.extra_data_size());
    memset(codec_context->extradata + config.extra_data_size(), '\0',
           FF_INPUT_BUFFER_PADDING_SIZE);
  } else {
    codec_context->extradata = NULL;
    codec_context->extradata_size = 0;
  }
}

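// Maps an FFmpeg channel layout bitmask onto the corresponding ChannelLayout,
// falling back to guessing from |channels| when the layout is not recognized.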
ChannelLayout ChannelLayoutToChromeChannelLayout(int64_t layout, int channels) {
  switch (layout) {
    case AV_CH_LAYOUT_MONO:
      return CHANNEL_LAYOUT_MONO;
    case AV_CH_LAYOUT_STEREO:
      return CHANNEL_LAYOUT_STEREO;
    case AV_CH_LAYOUT_2_1:
      return CHANNEL_LAYOUT_2_1;
    case AV_CH_LAYOUT_SURROUND:
      return CHANNEL_LAYOUT_SURROUND;
    case AV_CH_LAYOUT_4POINT0:
      return CHANNEL_LAYOUT_4_0;
    case AV_CH_LAYOUT_2_2:
      return CHANNEL_LAYOUT_2_2;
    case AV_CH_LAYOUT_QUAD:
      return CHANNEL_LAYOUT_QUAD;
    case AV_CH_LAYOUT_5POINT0:
      return CHANNEL_LAYOUT_5_0;
    case AV_CH_LAYOUT_5POINT1:
      return CHANNEL_LAYOUT_5_1;
    case AV_CH_LAYOUT_5POINT0_BACK:
      return CHANNEL_LAYOUT_5_0_BACK;
    case AV_CH_LAYOUT_5POINT1_BACK:
      return CHANNEL_LAYOUT_5_1_BACK;
    case AV_CH_LAYOUT_7POINT0:
      return CHANNEL_LAYOUT_7_0;
    case AV_CH_LAYOUT_7POINT1:
      return CHANNEL_LAYOUT_7_1;
    case AV_CH_LAYOUT_7POINT1_WIDE:
      return CHANNEL_LAYOUT_7_1_WIDE;
    case AV_CH_LAYOUT_STEREO_DOWNMIX:
      return CHANNEL_LAYOUT_STEREO_DOWNMIX;
    case AV_CH_LAYOUT_2POINT1:
      return CHANNEL_LAYOUT_2POINT1;
    case AV_CH_LAYOUT_3POINT1:
      return CHANNEL_LAYOUT_3_1;
    case AV_CH_LAYOUT_4POINT1:
      return CHANNEL_LAYOUT_4_1;
    case AV_CH_LAYOUT_6POINT0:
      return CHANNEL_LAYOUT_6_0;
    case AV_CH_LAYOUT_6POINT0_FRONT:
      return CHANNEL_LAYOUT_6_0_FRONT;
    case AV_CH_LAYOUT_HEXAGONAL:
      return CHANNEL_LAYOUT_HEXAGONAL;
    case AV_CH_LAYOUT_6POINT1:
      return CHANNEL_LAYOUT_6_1;
    case AV_CH_LAYOUT_6POINT1_BACK:
      return CHANNEL_LAYOUT_6_1_BACK;
    case AV_CH_LAYOUT_6POINT1_FRONT:
      return CHANNEL_LAYOUT_6_1_FRONT;
    case AV_CH_LAYOUT_7POINT0_FRONT:
      return CHANNEL_LAYOUT_7_0_FRONT;
#ifdef AV_CH_LAYOUT_7POINT1_WIDE_BACK
    case AV_CH_LAYOUT_7POINT1_WIDE_BACK:
      return CHANNEL_LAYOUT_7_1_WIDE_BACK;
#endif
    case AV_CH_LAYOUT_OCTAGONAL:
      return CHANNEL_LAYOUT_OCTAGONAL;
    default:
      // FFmpeg channel_layout is 0 for .wav and .mp3. Attempt to guess layout
      // based on the channel count.
      return GuessChannelLayout(channels);
  }
}

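// Converts an FFmpeg PixelFormat into its VideoFrame::Format equivalent.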
VideoFrame::Format PixelFormatToVideoFormat(PixelFormat pixel_format) {
  switch (pixel_format) {
    case PIX_FMT_YUV422P:
      return VideoFrame::YV16;
    case PIX_FMT_YUV444P:
      return VideoFrame::YV24;
    case PIX_FMT_YUV420P:
      return VideoFrame::YV12;
    case PIX_FMT_YUVJ420P:
      return VideoFrame::YV12J;
    case PIX_FMT_YUVA420P:
      return VideoFrame::YV12A;
    default:
      DVLOG(1) << "Unsupported PixelFormat: " << pixel_format;
  }
  return VideoFrame::UNKNOWN;
}

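// Converts a VideoFrame::Format into its FFmpeg PixelFormat equivalent.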
PixelFormat VideoFormatToPixelFormat(VideoFrame::Format video_format) {
  switch (video_format) {
    case VideoFrame::YV16:
      return PIX_FMT_YUV422P;
    case VideoFrame::YV12:
      return PIX_FMT_YUV420P;
    case VideoFrame::YV12J:
      return PIX_FMT_YUVJ420P;
    case VideoFrame::YV12A:
      return PIX_FMT_YUVA420P;
    case VideoFrame::YV24:
      return PIX_FMT_YUV444P;
    default:
      DVLOG(1) << "Unsupported VideoFrame::Format: " << video_format;
  }
  return PIX_FMT_NONE;
}

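// Parses a UTC date string of the form "YYYY-MM-DD HH:MM:SS" into |out|.
// Returns false if |date_utc| does not match this format or does not describe
// a valid time.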
bool FFmpegUTCDateToTime(const char* date_utc,
                         base::Time* out) {
  DCHECK(date_utc);
  DCHECK(out);

  std::vector<std::string> fields;
  std::vector<std::string> date_fields;
  std::vector<std::string> time_fields;
  base::Time::Exploded exploded;
  exploded.millisecond = 0;

  // TODO(acolwell): Update this parsing code when FFmpeg returns sub-second
  // information.
  if ((Tokenize(date_utc, " ", &fields) == 2) &&
      (Tokenize(fields[0], "-", &date_fields) == 3) &&
      (Tokenize(fields[1], ":", &time_fields) == 3) &&
      base::StringToInt(date_fields[0], &exploded.year) &&
      base::StringToInt(date_fields[1], &exploded.month) &&
      base::StringToInt(date_fields[2], &exploded.day_of_month) &&
      base::StringToInt(time_fields[0], &exploded.hour) &&
      base::StringToInt(time_fields[1], &exploded.minute) &&
      base::StringToInt(time_fields[2], &exploded.second)) {
    base::Time parsed_time = base::Time::FromUTCExploded(exploded);
    if (parsed_time.is_null())
      return false;

    *out = parsed_time;
    return true;
  }

  return false;
}

}  // namespace media