📄 cimgffmpeg.h
/*
# File: cimgffmpeg.h
#
# Description: plugin allowing convenient access to the frames of a video file
#              through the CImg and CImgList classes; uses the ffmpeg function library.
#
# Copyright: David G. Starkweather
#            starkdg@users.sourceforge.net
#            starkdg@comcast.net
# License:
#
# Instructions: This header depends on the ffmpeg libraries (svn revision 13263).
#
# 1.) obtain ffmpeg from svn (svn://svn.mplayerhq.hu/ffmpeg/trunk)
#
# 2.) configure with the following options:
#     ./configure --prefix=/usr/local --enable-shared --enable-swscale
#                 --enable-avfilter --enable-pthreads --enable-gpl
#                 --disable-ffmpeg --disable-ffserver --disable-ffplay
#     (The ffmpeg, ffserver and ffplay utilities are disabled only because they
#     are not needed by this header.)
#
# 3.) make
#
# 4.) make install
#
# Simply include this header in your code and link against the necessary libs.
# You will probably have to edit the #include "cimgffmpeg.h" directives to
# point to the correct directory level.
#
# For a project that uses this header, a command line invocation of the
# compiler might look something like this:
#
#   g++ -Dcimg_use_xshm -Dcimg_use_xrandr -I/usr/include/X11
#       -O2 -g3 -Wall -c -fmessage-length=0 -MMD -MP -MF"src/ffmpegplugindemo.d"
#       -MT"src/ffmpegplugindemo.d" -o"src/ffmpegplugindemo.o" "../src/ffmpegplugindemo.cpp"
#
# And the linker:
#
#   g++ -L/usr/local/lib -L/usr/X11R6/lib64 -o "{nameofyourproject}" ./src/{yourproject}.o
#       -lavformat -lavcodec -lswscale -lavutil -lm -ldl -lX11 -lXext -lXrandr -lpthread
#
# Set the LD_LIBRARY_PATH environment variable to point to the lib location
# (e.g. /usr/local/lib).
*/
#ifndef CIMGFFMPEG_H_
#define CIMGFFMPEG_H_

#include <ctime>
#include <pthread.h>
#include "string.h"
#include "CImg.h"

extern "C" {
	#include "libavformat/avformat.h"
	#include "libavcodec/avcodec.h"
	#include "libswscale/swscale.h"
}

/* struct to hold video information to be passed to the PlayVideo thread */
struct video_info_t {
	int fps;
	void *pList;
};

using namespace cimg_library;
//! Read frames from the given file into the CImgList parameter
/*
 * Reads frames from the video stream at step-sized intervals, starting at low_index
 * and stopping at hi_index (or after nb_retrieval frames, whichever comes first).
 * Frames are converted to 8-bit grayscale (PIX_FMT_GRAY8), blurred and resized to
 * 32x32 before being appended to the list.
 *
 * @param filename     const char* - complete filename
 * @param pFrameList   CImgList<>* - an empty CImgList
 * @param low_index    unsigned int - the start index frame
 * @param hi_index     unsigned int - the end index frame
 * @param step         int - the interval
 * @param nb_retrieval long - maximum number of frames to retrieve
 * @return int - the number of frames read; -1 if unable to read frames; 0 for no frames
*/
int ReadFrames(const char *filename, CImgList<unsigned char> *pFrameList,
               unsigned int low_index, unsigned int hi_index,
               int step = 1, long nb_retrieval = 100)
{
	// target pixel format
	int ffmpeg_pixfmt = PIX_FMT_GRAY8;

	av_register_all();

	AVFormatContext *pFormatCtx;

	// Open video file
	if (av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL) != 0)
		return -1; // Couldn't open file

	// Retrieve stream information
	if (av_find_stream_info(pFormatCtx) < 0)
		return -1; // Couldn't find stream information

	dump_format(pFormatCtx, 0, NULL, 0); // debugging function to print information about the format

	unsigned int i;
	AVCodecContext *pCodecCtx;

	// Find the video stream
	int videoStream = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
	{
		if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO)
		{
			videoStream = i;
			break;
		}
	}
	if (videoStream == -1)
		return -1; // no video stream

	// Get a pointer to the codec context for the video stream
	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	AVCodec *pCodec;

	// Find the decoder
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL)
		return -1; // Codec not found

	// Open codec
	if (avcodec_open(pCodecCtx, pCodec) < 0)
		return -1; // Could not open codec

	AVFrame *pFrame;

	// Allocate video frame
	pFrame = avcodec_alloc_frame();

	// Allocate an AVFrame structure for the converted frame
	AVFrame *pConvertedFrame = avcodec_alloc_frame();
	if (pConvertedFrame == NULL)
		return -1;

	uint8_t *buffer;
	int numBytes;

	// Determine required buffer size and allocate buffer
	numBytes = avpicture_get_size(ffmpeg_pixfmt, pCodecCtx->width, pCodecCtx->height);
	buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
	avpicture_fill((AVPicture *)pConvertedFrame, buffer, ffmpeg_pixfmt,
	               pCodecCtx->width, pCodecCtx->height);

	int frameFinished;
	unsigned int size = 0;
	unsigned int current_index = low_index;
	unsigned int next_index = current_index;
	AVPacket packet;

	while (av_read_frame(pFormatCtx, &packet) >= 0)
	{
		if (packet.stream_index == videoStream)
		{
			avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size);
			if (frameFinished)
			{
				if (current_index == next_index)
				{
					next_index += step;

					// Convert the decoded frame to the target pixel format
					SwsContext *c = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
					                               pCodecCtx->width, pCodecCtx->height, ffmpeg_pixfmt,
					                               1, NULL, NULL, NULL);
					sws_scale(c, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
					          pConvertedFrame->data, pConvertedFrame->linesize);
					sws_freeContext(c);

					// Wrap the converted buffer in a CImg and permute to width x height
					CImg<uint8_t> *pNextImage = new CImg<unsigned char>(*pConvertedFrame->data, 1,
					                                pCodecCtx->width, pCodecCtx->height, 1, false);
					CImg<uint8_t> NextImage = pNextImage->get_permute_axes("yzvx");
					NextImage.blur(1.0).resize(32, 32);
					pFrameList->push_back(NextImage);
					delete pNextImage;
					size++;
				}
				current_index++;
			}
		}
		av_free_packet(&packet);
		if ((next_index >= hi_index) || (size >= (unsigned int)nb_retrieval))
			break;
	}

	av_free(buffer);
	av_free(pConvertedFrame);
	av_free(pFrame);
	avcodec_close(pCodecCtx);
	av_close_input_file(pFormatCtx);

	return size;
}
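/*
   Illustrative use of ReadFrames, assuming a hypothetical file "clip.mpg" and
   arbitrary index/step values; the caller passes an empty CImgList and receives
   the grayscale frames that were grabbed:

       #include "cimgffmpeg.h"

       int main() {
           CImgList<unsigned char> frames;
           int n = ReadFrames("clip.mpg", &frames, 0, 100, 5, 20);
           if (n < 0) return 1;   // file could not be opened or decoded
           // frames now holds n images, each blurred and resized to 32x32
           return 0;
       }
*/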
//! Read frames from the stream at the index where the previous invocation left off
/*
 * Read up to nb_retrieval frames from the filename video stream at step intervals;
 * convert frames to the given pixel format; insert them into the empty pFrameList
 * CImgList. Meant to be called consecutively until the end of the stream is reached,
 * at which point another call will reinitialize at the beginning of the stream.
 *
 * @param filename     const char* - name of the file
 * @param pFrameList   CImgList<>* - an empty CImgList<>
 * @param step         int - the step interval
 * @param nb_retrieval unsigned int - max number to retrieve at a time
 * @param pixelformat  int - pixel format ( 0 = GRAY8 (8bpp grayscale), 1 = RGB24 (24 bpp RGB) )
 * @return number of frames read; 0 if end of stream reached; -1 if unable to read from stream
*/
int NextFrames(const char *filename, CImgList<unsigned char> *pFrameList,
               int step = 1, unsigned int nb_retrieval = 100, int pixelformat = 0)
{
	// decode state is kept across calls so that successive invocations
	// continue where the previous one stopped
	static int initd = 0;
	static int videoStream = -1;
	static AVFormatContext *pFormatCtx = NULL;
	static AVCodecContext *pCodecCtx = NULL;
	static AVCodec *pCodec = NULL;

	// determine the destination pixel format from the pixelformat argument;
	// ffmpeg_pixfmt values are taken from enum PixelFormat in avutil.h
	int ffmpeg_pixfmt = (pixelformat == 0) ? PIX_FMT_GRAY8 : PIX_FMT_RGB24;

	if (!initd)
	{
		av_register_all();

		// Open video file
		if (av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL) != 0)
			return -1; // Couldn't open file

		// Retrieve stream information
		if (av_find_stream_info(pFormatCtx) < 0)
			return -1; // Couldn't find stream information

		unsigned int i;

		// Find the video stream
		for (i = 0; i < pFormatCtx->nb_streams; i++)
		{
			if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO)
			{
				videoStream = i;
				break;
			}
		}
		if (videoStream == -1)
			return -1; // no video stream

		// Get a pointer to the codec context for the video stream
		pCodecCtx = pFormatCtx->streams[videoStream]->codec;

		// Find the decoder
		pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
		if (pCodec == NULL)
			return -1; // Codec not found

		// Open codec
		if (avcodec_open(pCodecCtx, pCodec) < 0)
			return -1; // Could not open codec

		initd = 1;
	}

	AVFrame *pFrame;

	// Allocate video frame
	pFrame = avcodec_alloc_frame();

	// Allocate an AVFrame structure for the converted frame
	AVFrame *pConvertedFrame = avcodec_alloc_frame();
	if (pConvertedFrame == NULL)
		return -1;

	uint8_t *buffer;
	int numBytes;

	// Determine required buffer size and allocate buffer
	numBytes = avpicture_get_size(ffmpeg_pixfmt, pCodecCtx->width, pCodecCtx->height);
	buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
	avpicture_fill((AVPicture *)pConvertedFrame, buffer, ffmpeg_pixfmt,
	               pCodecCtx->width, pCodecCtx->height);

	int frameFinished;
	unsigned int size = 0;
	unsigned int current_index = 0;
	unsigned int next_index = current_index;
	AVPacket packet;
	int result = 1;

	while ((result >= 0) && (size < nb_retrieval))
	{
		result = av_read_frame(pFormatCtx, &packet);
		if (result < 0)
			break;
		if (packet.stream_index == videoStream)
		{
			avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size);
			if (frameFinished)
			{
				if (current_index == next_index)
				{
					next_index += step;

					// Convert the decoded frame to the target pixel format
					SwsContext *c = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
					                               pCodecCtx->width, pCodecCtx->height, ffmpeg_pixfmt,
					                               1, NULL, NULL, NULL);
					sws_scale(c, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
					          pConvertedFrame->data, pConvertedFrame->linesize);
					sws_freeContext(c);

					if (ffmpeg_pixfmt == PIX_FMT_RGB24)
					{
						// 3 interleaved channels; permute to width x height x 1 x 3
						CImg<unsigned char> *pNextImage = new CImg<unsigned char>(*pConvertedFrame->data, 3,
						                                      pCodecCtx->width, pCodecCtx->height, 1, true);
						CImg<unsigned char> NextImage = pNextImage->get_permute_axes("yzvx");
						pFrameList->push_back(NextImage);
						delete pNextImage;
						size++;
					}
					else if (ffmpeg_pixfmt == PIX_FMT_GRAY8)
					{
						// single channel; permute to width x height
						CImg<unsigned char> *pNextImage = new CImg<unsigned char>(*pConvertedFrame->data, 1,
						                                      pCodecCtx->width, pCodecCtx->height, 1, true);
						CImg<unsigned char> NextImage = pNextImage->get_permute_axes("yzvx");
						pFrameList->push_back(NextImage);
						delete pNextImage;
						size++;
					}
				}
				current_index++;
			}
		}
		av_free_packet(&packet);
	}

	av_free(buffer);
	av_free(pConvertedFrame);
	av_free(pFrame);

	if (result < 0)
	{
		// end of stream: release the codec and format contexts so the next
		// call starts over at the beginning of the stream
		avcodec_close(pCodecCtx);
		av_close_input_file(pFormatCtx);
		initd = 0;
		return 0;
	}

	return size;
}

//! Get the number of streams contained in the given file
/*
 * Return the number of streams contained in the given file format.
 * @param file const char*
 * @return number of streams, or -1 if the file cannot be read
*/
int GetNumberStreams(const char *file)
{
	AVFormatContext *pFormatCtx;

	av_register_all();

	// Open video file
	if (av_open_input_file(&pFormatCtx, file, NULL, 0, NULL))
		return -1; // Couldn't open file

	// Retrieve stream information
	if (av_find_stream_info(pFormatCtx) < 0)
		return -1; // Couldn't find stream information

	int result = pFormatCtx->nb_streams;
	av_close_input_file(pFormatCtx);
	return result;
}

//! Get the number of video frames contained in a file
/*
 * Return the number of video frames in the file.
 * @param file const char* - video file
 * @return number of video frames, or -1 if the file cannot be read
*/
long GetNumberVideoFrames(const char *file)
{
	long nb_frames = 0L;
	AVFormatContext *pFormatCtx;

	av_register_all();

	// Open video file
	if (av_open_input_file(&pFormatCtx, file, NULL, 0, NULL))
		return -1; // Couldn't open file

	// Retrieve stream information
	if (av_find_stream_info(pFormatCtx) < 0)
		return -1; // Couldn't find stream information

	// Find the first video stream
	int videoStream = -1;
	for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
	{
		if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO)
		{
			videoStream = i;
			break;
		}
	}
	if (videoStream == -1)
		return -1; // no video stream
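	/* The listing breaks off at this point. What follows is only a sketch of how
	   the function could be completed, not the original code: it assumes the count
	   is taken from the stream's nb_frames field when the container reports it,
	   with a fallback estimate from the stream duration and frame rate. */
	AVStream *pStream = pFormatCtx->streams[videoStream];
	nb_frames = (long)pStream->nb_frames;
	if (nb_frames == 0 && pStream->r_frame_rate.den != 0)
	{
		// duration is expressed in stream time_base units
		nb_frames = (long)(pStream->duration * av_q2d(pStream->time_base)
		                                     * av_q2d(pStream->r_frame_rate));
	}
	av_close_input_file(pFormatCtx);
	return nb_frames;
}

#endif /* CIMGFFMPEG_H_ */

Because NextFrames keeps its decoder state in static variables, a typical caller
simply loops until it returns 0 (end of stream) or a negative value (error). A
minimal sketch, assuming a hypothetical file "clip.mpg", a step of 10 and batches
of 50 grayscale frames:

	#include <cstdio>
	#include "cimgffmpeg.h"

	int main() {
		printf("video frames: %ld\n", GetNumberVideoFrames("clip.mpg"));
		CImgList<unsigned char> batch;
		int n;
		while ((n = NextFrames("clip.mpg", &batch, 10, 50, 0)) > 0) {
			// ... process the n frames retrieved in this batch ...
			batch.clear();   // NextFrames expects an empty list on each call
		}
		return 0;
	}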