
📄 output_example.c.svn-base

📁 mediastreamer2 is an open-source library for transmitting media streams over the network
📖 Page 1 of 2
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf = av_malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}

/* prepare a dummy image */
static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
    int x, y, i;

    i = frame_index;

    /* Y */
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
        }
    }

    /* Cb and Cr */
    for(y=0;y<height/2;y++) {
        for(x=0;x<width/2;x++) {
            pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
            pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
        }
    }
}

static void write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;
    static struct SwsContext *img_convert_ctx;

    c = st->codec;

    if (frame_count >= STREAM_NB_FRAMES) {
        /* no more frame to compress. The codec has a latency of a few
           frames if using B frames, so we get the last frames by
           passing the same picture again */
    } else {
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
               to the codec pixel format if needed */
            if (img_convert_ctx == NULL) {
                img_convert_ctx = sws_getContext(c->width, c->height,
                                                 PIX_FMT_YUV420P,
                                                 c->width, c->height,
                                                 c->pix_fmt,
                                                 sws_flags, NULL, NULL, NULL);
                if (img_convert_ctx == NULL) {
                    fprintf(stderr, "Cannot initialize the conversion context\n");
                    exit(1);
                }
            }
            fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
            sws_scale(img_convert_ctx, tmp_picture->data, tmp_picture->linesize,
                      0, c->height, picture->data, picture->linesize);
        } else {
            fill_yuv_image(picture, frame_count, c->width, c->height);
        }
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
           future for that */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index= st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
        exit(1);
    }
    frame_count++;
}

static void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        exit(1);
    }

    filename = argv[1];

    /* auto detect the output format from the name. default is
       mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    /* allocate the output media context */
    oc = av_alloc_format_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    /* set the output parameters (must be done even if no
       parameters). */
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);

    for(;;) {
        /* compute current audio and video time */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
        }
    }

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(oc->pb);
    }

    /* free the stream */
    av_free(oc);

    return 0;
}
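
Note on the timestamp handling above: this listing uses the old pre-0.6 libavformat/libavcodec API (guess_format, av_set_parameters, avcodec_encode_video, PKT_FLAG_KEY and friends). The part worth pulling out is in write_video_frame: the encoder reports pts in the codec time base, and av_rescale_q converts it to the stream time base before the packet is written. The following standalone sketch shows just that conversion; the 1/25 and 1/90000 time bases are illustrative assumptions, not values taken from this file.

/* Standalone sketch (not part of output_example.c): demonstrates the
 * av_rescale_q() call used in write_video_frame above.  The time bases
 * below are assumptions chosen for illustration only. */
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include <libavutil/rational.h>
#include <libavutil/mathematics.h>

int main(void)
{
    AVRational codec_tb  = { 1, 25 };    /* assumed encoder time base: 25 fps      */
    AVRational stream_tb = { 1, 90000 }; /* assumed muxer time base: 90 kHz clock  */
    int64_t frame_pts = 50;              /* 50 ticks of 1/25 s = 2.0 seconds       */

    /* rescale pts from codec ticks to stream ticks, as write_video_frame
       does before handing the packet to av_write_frame() */
    int64_t stream_pts = av_rescale_q(frame_pts, codec_tb, stream_tb);

    printf("codec pts %" PRId64 " -> stream pts %" PRId64 "\n",
           frame_pts, stream_pts);       /* prints: codec pts 50 -> stream pts 180000 */
    return 0;
}

Built and linked against libavutil (e.g. gcc rescale_pts.c -lavutil, file name hypothetical), this prints the rescaled value: conceptually av_rescale_q(a, bq, cq) computes a * bq / cq with 64-bit rounding, which is why 50 ticks of 1/25 s become 180000 ticks of 1/90000 s.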
