
📄 ffplay.c

📁 From the collection: mediastreamer2, an open-source library for streaming media over the network
💻 C
📖 Page 1 of 5
/*
 * FFplay : Simple Media Player based on the ffmpeg libraries
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <math.h>
#include <limits.h>
#include "avformat.h"
#include "avdevice.h"
#include "rtsp.h"
#include "swscale.h"
#include "avstring.h"
#include "version.h"
#include "cmdutils.h"

#include <SDL.h>
#include <SDL_thread.h>

#ifdef __MINGW32__
#undef main /* We don't want SDL to override our main() */
#endif

#undef exit

static const char program_name[] = "FFplay";
static const int program_birth_year = 2003;

//#define DEBUG_SYNC

#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
#define MAX_SUBTITLEQ_SIZE (5 * 16 * 1024)

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if too big error */
#define AV_NOSYNC_THRESHOLD 10.0

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB   20

/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
#define SAMPLE_ARRAY_SIZE (2*65536)

static int sws_flags = SWS_BICUBIC;

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;                                  ///<presentation time stamp for this picture
    SDL_Overlay *bmp;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

typedef struct VideoState {
    SDL_Thread *parse_tid;
    SDL_Thread *video_tid;
    AVInputFormat *iformat;
    int no_background;
    int abort_request;
    int paused;
    int last_paused;
    int seek_req;
    int seek_flags;
    int64_t seek_pos;
    AVFormatContext *ic;
    int dtg_active_format;

    int audio_stream;

    int av_sync_type;
    double external_clock; /* external clock base */
    int64_t external_clock_time;

    double audio_clock;
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    /* samples output by the codec. we reserve more space for avsync
       compensation */
    DECLARE_ALIGNED(16,uint8_t,audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]);
    unsigned int audio_buf_size; /* in bytes */
    int audio_buf_index; /* in bytes */
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;

    int show_audio; /* if true, display audio samples */
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
    int sample_array_index;
    int last_i_start;

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_delay;
    double video_clock;                          ///<pts of last decoded frame / predicted pts of next decoded frame
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;                    ///<current displayed pts (different from video_clock if frame fifos are used)
    int64_t video_current_pts_time;              ///<time (av_gettime) at which we updated video_current_pts - used to have running video pts
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;

    //    QETimer *video_timer;

    char filename[1024];
    int width, height, xleft, ytop;
} VideoState;

void show_help(void);
static int audio_write_get_buf_size(VideoState *is);

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static int fs_screen_width;
static int fs_screen_height;
static int screen_width = 0;
static int screen_height = 0;
static int frame_width = 0;
static int frame_height = 0;
static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
static int audio_disable;
static int video_disable;
static int wanted_audio_stream= 0;
static int wanted_video_stream= 0;
static int seek_by_bytes;
static int display_disable;
static int show_status;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int debug = 0;
static int debug_mv = 0;
static int step = 0;
static int thread_count = 1;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
static int error_resilience = FF_ER_CAREFUL;
static int error_concealment = 3;
static int decoder_reorder_pts= 0;

/* current context */
static int is_full_screen;
static VideoState *cur_stream;
static int64_t audio_callback_time;

AVPacket flush_pkt;

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)

SDL_Surface *screen;

/* packet queue handling */
static void packet_queue_init(PacketQueue *q)
{
    memset(q, 0, sizeof(PacketQueue));
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
}

static void packet_queue_flush(PacketQueue *q)
{
    AVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);
        av_freep(&pkt);
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_end(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyMutex(q->mutex);
    SDL_DestroyCond(q->cond);
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *pkt1;

    /* duplicate the packet */
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;

    SDL_LockMutex(q->mutex);

    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    /* XXX: should duplicate packet data in DV case */
    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
    return 0;
}

static void packet_queue_abort(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);

    q->abort_request = 1;

    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
}

/* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for(;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size;
            *pkt = pkt1->pkt;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}

static inline void fill_rectangle(SDL_Surface *screen,
                                  int x, int y, int w, int h, int color)
{
    SDL_Rect rect;
    rect.x = x;
    rect.y = y;
    rect.w = w;
    rect.h = h;
    SDL_FillRect(screen, &rect, color);
}

#if 0
/* draw only the border of a rectangle */
void fill_border(VideoState *s, int x, int y, int w, int h, int color)
{
    int w1, w2, h1, h2;

    /* fill the background */
    w1 = x;
    if (w1 < 0)
        w1 = 0;
    w2 = s->width - (x + w);
    if (w2 < 0)
        w2 = 0;
    h1 = y;
    if (h1 < 0)
        h1 = 0;
    h2 = s->height - (y + h);
    if (h2 < 0)
        h2 = 0;
    fill_rectangle(screen,
                   s->xleft, s->ytop,
                   w1, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + s->width - w2, s->ytop,
                   w2, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop,
                   s->width - w1 - w2, h1,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop + s->height - h2,
                   s->width - w1 - w2, h2,
                   color);
}
#endif

#define SCALEBITS 10
#define ONE_HALF  (1 << (SCALEBITS - 1))
#define FIX(x)    ((int) ((x) * (1<<SCALEBITS) + 0.5))

#define RGB_TO_Y_CCIR(r, g, b) \
((FIX(0.29900*219.0/255.0) * (r) + FIX(0.58700*219.0/255.0) * (g) + \
  FIX(0.11400*219.0/255.0) * (b) + (ONE_HALF + (16 << SCALEBITS))) >> SCALEBITS)

#define RGB_TO_U_CCIR(r1, g1, b1, shift)\
(((- FIX(0.16874*224.0/255.0) * r1 - FIX(0.33126*224.0/255.0) * g1 +         \
     FIX(0.50000*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)

#define RGB_TO_V_CCIR(r1, g1, b1, shift)\
(((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 -           \
   FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)

#define ALPHA_BLEND(a, oldp, newp, s)\
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))

#define RGBA_IN(r, g, b, a, s)\
{\
    unsigned int v = ((const uint32_t *)(s))[0];\
    a = (v >> 24) & 0xff;\
    r = (v >> 16) & 0xff;\
    g = (v >> 8) & 0xff;\
    b = v & 0xff;\
}

#define YUVA_IN(y, u, v, a, s, pal)\
{\
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
    a = (val >> 24) & 0xff;\
    y = (val >> 16) & 0xff;\
    u = (val >> 8) & 0xff;\
    v = val & 0xff;\
}

#define YUVA_OUT(d, y, u, v, a)\
{\
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
}

#define BPP 1

static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
{
    int wrap, wrap3, width2, skip2;
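The PacketQueue above is the producer/consumer primitive shared by ffplay's threads: packet_queue_put() appends a packet under the mutex and signals the condition variable, while packet_queue_get() blocks until a packet arrives or packet_queue_abort() sets abort_request. The sketch below is not part of this listing; it is a minimal, hypothetical consumer loop (the name consumer_thread and the "decode" step are placeholders) showing how the return contract of packet_queue_get() is typically handled.

/* Hypothetical consumer loop illustrating the PacketQueue contract above.
   consumer_thread and the decode step are placeholders, not ffplay code. */
static int consumer_thread(void *arg)
{
    PacketQueue *q = arg;  /* filled by another thread via packet_queue_put() */
    AVPacket pkt;

    for(;;) {
        /* block until a packet is queued; returns < 0 once
           packet_queue_abort() has set q->abort_request */
        int ret = packet_queue_get(q, &pkt, 1);
        if (ret < 0)
            break;             /* queue aborted: exit the thread */
        if (ret > 0) {
            /* ... hand pkt to the decoder here ... */
            av_free_packet(&pkt); /* releases the data duplicated by av_dup_packet() in packet_queue_put() */
        }
    }
    return 0;
}

With block set to 1, the call only ever returns -1 or 1; the 0 return is reserved for non-blocking polls (block == 0) on an empty queue.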
