r1009: Move the dependencies to newer package names
[cinelerra_cv/mob.git] / quicktime/ffmpeg/output_example.c
/*
 * Libavformat API example: Output a media file in any supported
 * libavformat format. The default codecs are used.
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#ifndef M_PI
#define M_PI 3.1415926535897931
#endif

#include "avformat.h"

/* 5 seconds stream duration */
#define STREAM_DURATION   5.0
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT PIX_FMT_YUV420P /* default pix_fmt */

/**************************************************************/
/* audio output */
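
/* t, tincr and tincr2 drive the test tone: a sine that starts at 110 Hz and
   rises by 110 Hz per second (see open_audio() and get_audio_frame()).
   'samples' holds one uncompressed input frame, 'audio_outbuf' receives the
   encoded data, and 'audio_input_frame_size' is the encoder frame size in
   samples per channel. */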
float t, tincr, tincr2;
int16_t *samples;
uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;

/*
 * add an audio output stream
 */
AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 1);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_AUDIO;

    /* put sample parameters */
    c->bit_rate = 64000;
    c->sample_rate = 44100;
    c->channels = 2;
    return st;
}

void open_audio(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVCodec *codec;

    c = st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open it */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    /* init signal generator */
    t = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf = malloc(audio_outbuf_size);
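
    /* PCM "codecs" report frame_size <= 1, so the input frame size is derived
       from the output buffer instead: bytes divided by the channel count, and
       halved again for the 16-bit formats to convert bytes to samples. */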
    /* ugly hack for PCM codecs (will be removed ASAP with new PCM support)
       to compute the input frame size in samples */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch(st->codec->codec_id) {
        case CODEC_ID_PCM_S16LE:
        case CODEC_ID_PCM_S16BE:
        case CODEC_ID_PCM_U16LE:
        case CODEC_ID_PCM_U16BE:
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }
    samples = malloc(audio_input_frame_size * 2 * c->channels);
}

/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
   'nb_channels' channels */
void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int j, i, v;
    int16_t *q;

    q = samples;
    for(j=0;j<frame_size;j++) {
        v = (int)(sin(t) * 10000);
        for(i = 0; i < nb_channels; i++)
            *q++ = v;
        t += tincr;
        tincr += tincr2;
    }
}

void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = st->codec;

    get_audio_frame(samples, audio_input_frame_size, c->channels);

    pkt.size= avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);
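
    /* the encoder stamps pts in the codec time base; rescale it to the
       stream time base before handing the packet to the muxer */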
    pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
    pkt.flags |= PKT_FLAG_KEY;
    pkt.stream_index= st->index;
    pkt.data= audio_outbuf;

    /* write the compressed frame in the media file */
    if (av_write_frame(oc, &pkt) != 0) {
        fprintf(stderr, "Error while writing audio frame\n");
        exit(1);
    }
}

void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}

/**************************************************************/
/* video output */

AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;
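
/* 'picture' is the frame handed to the encoder; 'tmp_picture' is a YUV420P
   staging frame used only when the codec wants another pixel format.
   'video_outbuf' receives the encoded data for one frame. */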
/* add a video output stream */
AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = 352;
    c->height = 288;
    /* frames per second */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* needed to avoid using macroblocks in which some coeffs overflow;
           this doesn't happen with normal video, it just happens here because
           the motion of the chroma plane doesn't match the luma plane */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}
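
/* allocate an AVFrame together with the image buffer it points into;
   avpicture_fill() wires the frame's data[] and linesize[] members into
   the single malloc'd buffer */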
AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
        return NULL;
    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   pix_fmt, width, height);
    return picture;
}

void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        video_outbuf_size = 200000;
        video_outbuf = malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}

/* prepare a dummy image */
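/* YUV420P stores the chroma planes at half resolution in both directions,
   which is why the Cb/Cr loops only cover width/2 x height/2 */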
void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
    int x, y, i;

    i = frame_index;

    /* Y */
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
        }
    }

    /* Cb and Cr */
    for(y=0;y<height/2;y++) {
        for(x=0;x<width/2;x++) {
            pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
            pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
        }
    }
}

void write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (frame_count >= STREAM_NB_FRAMES) {
        /* no more frames to compress. The codec has a latency of a few
           frames if using B frames, so we get the last frames by
           passing the same picture again */
    } else {
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
               to the codec pixel format if needed */
            fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
            img_convert((AVPicture *)picture, c->pix_fmt,
                        (AVPicture *)tmp_picture, PIX_FMT_YUV420P,
                        c->width, c->height);
        } else {
            fill_yuv_image(picture, frame_count, c->width, c->height);
        }
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
           future for that */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index= st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
        exit(1);
    }
    frame_count++;
}

void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        exit(1);
    }

    filename = argv[1];

    /* auto detect the output format from the name. default is
       mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    /* allocate the output media context */
    oc = av_alloc_format_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    /* set the output parameters (must be done even if no
       parameters). */
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);
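
    /* main loop: keep encoding until both streams have passed STREAM_DURATION;
       on each iteration, feed whichever stream has the smaller presentation
       time so that audio and video stay interleaved in the output file */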
    for(;;) {
        /* compute current audio and video time */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
        }
    }

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(&oc->pb);
    }

    /* free the format context */
    av_free(oc);

    return 0;
}