FFMPEGParser.java
package net.sf.fmj.ffmpeg_java;

import java.awt.Dimension;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.media.BadHeaderException;
import javax.media.Buffer;
import javax.media.Duration;
import javax.media.Format;
import javax.media.IncompatibleSourceException;
import javax.media.ResourceUnavailableException;
import javax.media.Time;
import javax.media.Track;
import javax.media.format.AudioFormat;
import javax.media.format.RGBFormat;
import javax.media.format.VideoFormat;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.PullDataSource;

import net.sf.ffmpeg_java.AVCodecLibrary;
import net.sf.ffmpeg_java.AVFormatLibrary;
import net.sf.ffmpeg_java.AVUtilLibrary;
import net.sf.ffmpeg_java.AVCodecLibrary.AVCodec;
import net.sf.ffmpeg_java.AVCodecLibrary.AVCodecContext;
import net.sf.ffmpeg_java.AVCodecLibrary.AVFrame;
import net.sf.ffmpeg_java.AVFormatLibrary.AVFormatContext;
import net.sf.ffmpeg_java.AVFormatLibrary.AVInputFormat;
import net.sf.ffmpeg_java.AVFormatLibrary.AVOutputFormat;
import net.sf.ffmpeg_java.AVFormatLibrary.AVPacket;
import net.sf.ffmpeg_java.AVFormatLibrary.AVStream;
import net.sf.ffmpeg_java.FFMPEGLibrary.AVRational;
import net.sf.ffmpeg_java.custom_protocol.CallbackURLProtocolMgr;
import net.sf.fmj.media.AbstractDemultiplexer;
import net.sf.fmj.media.AbstractTrack;
import net.sf.fmj.utility.LoggerSingleton;
import net.sf.fmj.utility.URLUtils;

import com.lti.utils.collections.Queue;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;

/**
 * Demultiplexer which uses the ffmpeg-java native wrapper around ffmpeg
 * (libavformat, libavcodec, libavutil).
 *
 * @author Ken Larson
 */
public class FFMPEGParser extends AbstractDemultiplexer
{
    private static final Logger logger = LoggerSingleton.logger;

    // If true, we'll play back video-only if we are missing an audio codec.
    // Typical example: MPEG-4 AAC.
    private static final boolean PROCEED_IF_NO_AUDIO_CODEC = true;

    private final AVFormatLibrary AVFORMAT;
    private final AVCodecLibrary AVCODEC;
    private final AVUtilLibrary AVUTIL;

    private AVFormatContext formatCtx;

    // If USE_DATASOURCE_URL_ONLY is true, this is a bit of a hack - we don't
    // really use the DataSource, we just grab its URL, so arbitrary data
    // sources won't work. Otherwise, we register a custom URL handler with
    // ffmpeg, which calls us back to get the data.
    private final boolean USE_DATASOURCE_URL_ONLY = false;

    private ContentDescriptor[] supportedInputContentDescriptors = null;

    static final String FIRST_FFMPEG_DEMUX_NAME = "aac";

    public FFMPEGParser()
    {
        try
        {
            AVFORMAT = AVFormatLibrary.INSTANCE;
            AVCODEC = AVCodecLibrary.INSTANCE;
            AVUTIL = AVUtilLibrary.INSTANCE;

            AVFORMAT.av_register_all();

            queryInputContentDescriptors();
        }
        catch (Throwable t)
        {
            logger.log(Level.WARNING, "Unable to initialize ffmpeg libraries: " + t);
            throw new RuntimeException(t);
        }
    }

    // Synchronize on this before using the libraries, to prevent threading problems.
    private static final Object AV_SYNC_OBJ = new Object();
    private PullDataSource source;
    private PullSourceStreamTrack[] tracks;
    private Queue[] packetQueues; // Queue of AVPacket

    //@Override
    public ContentDescriptor[] getSupportedInputContentDescriptors()
    {
        return supportedInputContentDescriptors;
    }

    protected void queryInputContentDescriptors()
    {
        // get content descriptors from ffmpeg
        List contentDescriptors = new ArrayList();
        int i = 1;
        AVInputFormat avInputFormat = AVFORMAT.av_find_input_format(FIRST_FFMPEG_DEMUX_NAME);
        while (avInputFormat != null)
        {
            String mimeType = null;
            AVOutputFormat avOutputFormat = AVFORMAT.guess_format(avInputFormat.name, null, null);
            if (avOutputFormat != null && avOutputFormat.mime_type != null && avOutputFormat.mime_type.length() > 0)
            {
                mimeType = avOutputFormat.mime_type;
            }
            else
            {
                mimeType = "ffmpeg/" + avInputFormat.name;
            }

            logger.log(Level.FINEST, i++ + ". " + avInputFormat.long_name + " : " + mimeType);
            contentDescriptors.add(new ContentDescriptor(ContentDescriptor.mimeTypeToPackageName(mimeType)));

            if (avInputFormat.next != null && avInputFormat.next.isValid())
            {
                avInputFormat = new AVInputFormat(avInputFormat.next);
            }
            else
            {
                avInputFormat = null;
            }
        }

        // add content types which ffmpeg supports but doesn't report
        contentDescriptors.add(new ContentDescriptor("video.quicktime"));
        contentDescriptors.add(new ContentDescriptor("video.x_ms_wmv"));
        contentDescriptors.add(new ContentDescriptor("video.mp4"));
        contentDescriptors.add(new ContentDescriptor("video.3gpp"));
        contentDescriptors.add(new ContentDescriptor("video.mp2p"));

        supportedInputContentDescriptors = (ContentDescriptor[]) contentDescriptors.toArray(new ContentDescriptor[0]);
    }
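    /*
     * Note (added for clarity): ContentDescriptor.mimeTypeToPackageName
     * converts a MIME type into the dotted "package name" form that JMF uses
     * for content types - essentially '/' becomes '.' and '-' becomes '_',
     * e.g. "video/quicktime" -> "video.quicktime" and
     * "video/x-ms-wmv" -> "video.x_ms_wmv". That is why the hard-coded
     * descriptors above are written in the underscore form.
     */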
    //@Override
    public Track[] getTracks() throws IOException, BadHeaderException
    {
        return tracks;
    }

    //@Override
    public void setSource(DataSource source) throws IOException, IncompatibleSourceException
    {
        final String protocol = source.getLocator().getProtocol();

        if (USE_DATASOURCE_URL_ONLY)
        {
            if (!(protocol.equals("file") || protocol.equals("http")))
                throw new IncompatibleSourceException();
            // TODO: ffmpeg appears to support multiple file protocols, for
            // example: file: pipe: udp: rtp: tcp: http: - we should also allow
            // those. It would be best to query this dynamically from ffmpeg.
        }
        else
        {
            if (!(source instanceof PullDataSource))
                throw new IncompatibleSourceException();
//          if (!(source instanceof SourceCloneable))
//              throw new IncompatibleSourceException();
        }

        this.source = (PullDataSource) source;
    }

    //@Override
    public void open() throws ResourceUnavailableException
    {
        synchronized (AV_SYNC_OBJ)
        {
            try
            {
                AVCODEC.avcodec_init(); // TODO: everything seems to be fine if we don't call this...
            }
            catch (Throwable t)
            {
                logger.log(Level.WARNING, "" + t, t);
                throw new ResourceUnavailableException("avcodec_init or av_register_all failed");
            }

            // not sure what the consequences of such a mismatch are, but it is
            // worth logging a warning:
            if (AVCODEC.avcodec_version() != AVCodecLibrary.LIBAVCODEC_VERSION_INT)
                logger.warning("ffmpeg-java and ffmpeg versions do not match: avcodec_version="
                        + AVCODEC.avcodec_version() + " LIBAVCODEC_VERSION_INT="
                        + AVCodecLibrary.LIBAVCODEC_VERSION_INT);

            final String urlStr;
            if (USE_DATASOURCE_URL_ONLY)
            {
                // Just use the URL from the datasource. FMJ supports relative
                // file URLs, but FFMPEG does not, so we'll rewrite the URL here.
                // TODO: perhaps we should only do this if FFMPEG has a problem
                // (av_open_input_file returns nonzero).
                if (source.getLocator().getProtocol().equals("file"))
                    urlStr = URLUtils.createAbsoluteFileUrl(source.getLocator().toExternalForm());
                else
                    urlStr = source.getLocator().toExternalForm();
            }
            else
            {
                // use the real java datasource, via callbacks.
                CallbackURLProtocolMgr.register(AVFORMAT); // TODO: do this in start?
                final String callbackURL = CallbackURLProtocolMgr.addCallbackURLProtocolHandler(
                        new PullDataSourceCallbackURLProtocolHandler(source));
                // TODO: we need to remove the handler when we are done.
                urlStr = callbackURL;
            }

            final PointerByReference ppFormatCtx = new PointerByReference();

            // Open video file
            final int ret = AVFORMAT.av_open_input_file(ppFormatCtx, urlStr, null, 0, null);
            if (ret != 0)
                throw new ResourceUnavailableException("av_open_input_file failed: " + ret); // Couldn't open file

            formatCtx = new AVFormatContext(ppFormatCtx.getValue());
            //System.out.println(new String(formatCtx.filename));

            // Retrieve stream information
            if (AVFORMAT.av_find_stream_info(formatCtx) < 0)
                throw new ResourceUnavailableException("Couldn't find stream information");

            AVFORMAT.dump_format(formatCtx, 0, urlStr, 0);

            VideoTrack videoTrack = null;
            AudioTrack audioTrack = null;

            for (int i = 0; i < formatCtx.nb_streams; i++)
            {
                final AVStream stream = new AVStream(formatCtx.getStreams()[i]);
                final AVCodecContext codecCtx = new AVCodecContext(stream.codec);

                if (codecCtx.codec_id == 0)
                {
                    logger.info("Codec id is zero (no codec) - skipping stream " + i);
                    continue;
                }

                if (codecCtx.codec_type == AVCodecLibrary.CODEC_TYPE_VIDEO && videoTrack == null)
                {
                    videoTrack = new VideoTrack(i, stream, codecCtx);
                }
                else if (codecCtx.codec_type == AVCodecLibrary.CODEC_TYPE_AUDIO && audioTrack == null)
                {
                    try
                    {
                        audioTrack = new AudioTrack(i, stream, codecCtx);
                    }
                    catch (ResourceUnavailableException e)
                    {
                        if (!PROCEED_IF_NO_AUDIO_CODEC)
                            throw e;
                        logger.log(Level.WARNING, "Skipping audio track: " + e, e);
                    }
                }
                else
                {
                    //throw new ResourceUnavailableException("Unknown track codec type " + codecCtx.codec_type + " for track " + i);
                }
            }

            if (audioTrack == null && videoTrack == null)
                throw new ResourceUnavailableException("No audio or video track found");
            else if (audioTrack != null && videoTrack != null)
                tracks = new PullSourceStreamTrack[] {videoTrack, audioTrack};
            else if (audioTrack != null)
                tracks = new PullSourceStreamTrack[] {audioTrack};
            else
                tracks = new PullSourceStreamTrack[] {videoTrack};

            packetQueues = new Queue[formatCtx.nb_streams];
            for (int i = 0; i < packetQueues.length; ++i)
                packetQueues[i] = new Queue();
        }

        super.open();
    }

    public void close()
    {
        synchronized (AV_SYNC_OBJ)
        {
            if (tracks != null)
            {
                for (int i = 0; i < tracks.length; ++i)
                {
                    if (tracks[i] != null)
                    {
                        tracks[i].deallocate();
                        tracks[i] = null;
                    }
                }
                tracks = null;
            }

            // Close the video file
            if (formatCtx != null)
            {
                AVFORMAT.av_close_input_file(formatCtx);
                formatCtx = null;
            }
        }

        super.close();
    }

    //@Override
    public void start() throws IOException
    {
    }

    // TODO: should we stop the data source in stop?
//  //@Override
//  public void stop()
//  {
//      try
//      {
//          source.stop();
//      }
//      catch (IOException e)
//      {
//          logger.log(Level.WARNING, "" + e, e);
//      }
//  }

    //@Override
    public boolean isPositionable()
    {
        return true;
    }
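    /*
     * Note (added for clarity): with stream index -1, av_seek_frame expects a
     * timestamp in AV_TIME_BASE units. AV_TIME_BASE is 1,000,000, i.e.
     * microseconds, which is why the nanosecond position is divided by 1000
     * in setPosition below.
     */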
    //@Override
    public Time setPosition(Time where, int rounding)
    {
        // TODO: how to use rounding?
        synchronized (AV_SYNC_OBJ)
        {
            // when stream is -1, units are AV_TIME_BASE.
            // TODO: tutorial 7 on www.dranger.com suggests that the -1 can
            // sometimes cause problems...
            final int result = AVFORMAT.av_seek_frame(formatCtx, -1, where.getNanoseconds() / 1000L, 0);
            if (result < 0)
            {
                logger.severe("av_seek_frame failed with code " + result);
                // TODO: what to return if error?
            }
            return where; // TODO: what to return
            // TODO: we have to reset the frame counters on the tracks....
        }
    }

    //@Override
    public boolean isRandomAccess()
    {
        return super.isRandomAccess(); // TODO: can we determine this from the data source?
    }

    public static VideoFormat convertCodecPixelFormat(int pixFmt, int width, int height, double frameRate)
    {
        final int bitsPerPixel;
        if (pixFmt == AVCodecLibrary.PIX_FMT_RGB24)
            bitsPerPixel = 24;
//      else if (pixFmt == AVCodecLibrary.PIX_FMT_RGB32) // TODO: see comments on PIX_FMT_RGB32 in libavutil/avutil.h
//          bitsPerPixel = 32;
        else
            throw new IllegalArgumentException(); // TODO: support other formats

        final int red, green, blue;
        red = 1;
        green = 2;
        blue = 3;

        return new RGBFormat(new Dimension(width, height), -1, byte[].class, (float) frameRate, bitsPerPixel, red, green, blue);
    }

    static AVRational getTimeBase(AVStream stream, AVCodecContext codecCtx)
    {
        // code adapted from ffmpeg utils.c: dump_format
        if (stream.r_frame_rate.num != 0 && stream.r_frame_rate.den != 0)
        {
            // the time base is the inverse of the frame rate
            AVRational result = new AVRational();
            result.num = stream.r_frame_rate.den;
            result.den = stream.r_frame_rate.num;
            return result;
        }
        else if (stream.time_base.num != 0 && stream.time_base.den != 0)
            return stream.time_base;
        else
            return codecCtx.time_base;
    }

    static double getFPS(AVStream stream, AVCodecContext codecCtx)
    {
        final AVRational time_base = getTimeBase(stream, codecCtx);
        return (double) time_base.den / (double) time_base.num;
    }
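    /*
     * Worked example (added for clarity) of the frame-number-based timestamp
     * calculation in getTimestamp below: for fixed 25-fps content the time
     * base is 1/25, so frame 50 maps to
     * 1000000000 * 50 * 1 / 25 = 2,000,000,000 ns = 2 seconds.
     */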
    static long getTimestamp(final AVFrame frame, final AVStream stream, final AVCodecContext codecCtx, long frameNo, long packetDts)
    {
        // from AVFrame, regarding int64_t pts:
        /**
         * presentation timestamp in time_base units (time when frame should be shown to user)
         * If AV_NOPTS_VALUE then frame_rate = 1/time_base will be assumed.
         */
        // from AVCodecContext, regarding time_base:
        /**
         * This is the fundamental unit of time (in seconds) in terms
         * of which frame timestamps are represented. For fixed-fps content,
         * timebase should be 1/framerate and timestamp increments should be
         * identically 1.
         */

        // The time base here is used for calculating based on the frame number.
        // TODO: if other calculations are used, using pts/dts, then this may not be correct.
        final AVRational time_base = getTimeBase(stream, codecCtx); //codecCtx.time_base;

        // TODO: the frame rate is in frames, where half of an interlaced frame
        // counts as 1, so for interlaced video this has to be taken into
        // account. For example, safexmas.mov is reported as:
        //   Duration: 00:00:16.4, start: 0.000000, bitrate: 1730 kb/s
        //   Stream #0.0(eng): Video: cinepak, yuv420p, 320x200, 30.00 fps(r)
        // and it has 220 frames. But 220/16.4=13.4.

        // see http://www.dranger.com/ffmpeg/tutorial05.html for a good
        // discussion on pts.

        // TODO: for now, we'll just use the packetDts, since pts seems to always be zero.
        if (packetDts == AVCodecLibrary.AV_NOPTS_VALUE) // TODO: with some movies, pts is just always zero, so we'll handle it the same way.
        {
            // If AV_NOPTS_VALUE then frame_rate = 1/time_base will be assumed,
            // therefore we need to know the frame #.
            return (1000000000L * frameNo * (long) time_base.num) / (long) time_base.den;
        }
        else
        {
            // TODO: the code to do the calculation based on the dts is wrong,
            // so we'll just use the frame-number-based calculation for now.
            // Not sure how to calculate the correct dts for a frame -
            // try 4harmonic.mpg for an example of this.
            return (1000000000L * frameNo * (long) time_base.num) / (long) time_base.den;
            //return (1000000000L * packetDts * (long) time_base.num) / (long) time_base.den; // TODO: is this correct? it appears to be based on the AVFrame comment, but has not been tested yet.
        }
    }
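Usage note: as a JMF/FMJ Demultiplexer plugin, this class is normally not
instantiated directly; FMJ's plugin registry selects it by content type when a
player is created. A minimal sketch of how it is typically reached, assuming
the FMJ registry and the ffmpeg-java natives are installed (the file path is
hypothetical):

import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.Player;

public class PlayDemo
{
    public static void main(String[] args) throws Exception
    {
        // The demultiplexer is chosen behind the scenes based on the content
        // type reported by the data source (see
        // getSupportedInputContentDescriptors above).
        final Player player = Manager.createRealizedPlayer(
                new MediaLocator("file:///tmp/example.mp4")); // hypothetical path
        player.start();
    }
}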