📄 javasoundparser.java
字号:
package net.sf.fmj.media.parser;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.media.BadHeaderException;
import javax.media.Buffer;
import javax.media.Duration;
import javax.media.Format;
import javax.media.IncompatibleSourceException;
import javax.media.ResourceUnavailableException;
import javax.media.Time;
import javax.media.Track;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.FileTypeDescriptor;
import javax.media.protocol.PullDataSource;
import javax.media.protocol.PullSourceStream;
import javax.media.protocol.SourceCloneable;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

import net.sf.fmj.media.AbstractDemultiplexer;
import net.sf.fmj.media.AbstractTrack;
import net.sf.fmj.media.PullSourceStreamInputStream;
import net.sf.fmj.media.renderer.audio.JavaSoundRenderer;
import net.sf.fmj.utility.LoggerSingleton;

/**
 * Demultiplexer (parser) that delegates audio parsing to JavaSound
 * ({@link javax.sound.sampled.AudioSystem}): WAV, AU, AIFF, plus MP3/Ogg when
 * the corresponding JavaSound SPIs are on the classpath.
 *
 * @author Ken Larson
 */
public class JavaSoundParser extends AbstractDemultiplexer
{
    private static final Logger logger = LoggerSingleton.logger;

    /*
     * If readFrame reads from pullSourceStreamInputStreamForReadFrame without
     * audioInputStreamForReadFrame being opened, then the codec is getting the
     * raw data WITH the header. So it is able to create a converted
     * AudioInputStream. But if we don't use a codec, then the renderer will
     * get the header, which will come out as a short click or noise before the
     * sound.
     *
     * I see two options:
     * 1. the header data gets passed using some special mechanism, like in a
     *    buffer header or in a buffer with a special flag set, so the codec
     *    knows to use it but a renderer will ignore it.
     * 2. the codec reconstructs a header based on the format.
     *
     * However, it is potentially worse than that, since WAV files are
     * potentially stored as chunks, meaning there is more than just one header
     * up front. So I don't see that option 1 is very good.
     *
     * Another possibility is we could change the reported output format to not
     * be a standard AudioFormat, but to be a WAV audio format, then let there
     * be a specific codec for that.
     *
     * With #2, we could have some luck, because any internal headers will be
     * stripped out by the audio input stream. So we don't have to have the
     * codec put on the exact correct header, it just has to be one that allows
     * getAudioInputStream to work.
     *
     * Method 2 works.
     */

    /** Content types this parser accepts; exposed only via a defensive copy. */
    private final ContentDescriptor[] supportedInputContentDescriptors = new ContentDescriptor[] {
            new ContentDescriptor(FileTypeDescriptor.WAVE), // .wav
            new ContentDescriptor(FileTypeDescriptor.BASIC_AUDIO), // .au
            new ContentDescriptor(FileTypeDescriptor.AIFF), // .aiff
            new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO), // .mp3 (requires mp3 SPI) - TODO: can we determine from JavaSound whether we have this SPI?
            new ContentDescriptor("audio.ogg"), // .ogg (requires ogg SPI)
            new ContentDescriptor("application.ogg"), // this has been observed in the wild as well, as an alias for audio.ogg
    };
    // AudioSystem.getAudioFileTypes() does not return .mp3, even if the mp3
    // SPI is in the classpath. So how can we find out if it is present?

    // We need to open two streams, one a clone of the other, because
    // determining the format changes the stream position.
    private PullDataSource sourceForFormat;
    private PullDataSource sourceForReadFrame;

    private PullSourceStreamTrack[] tracks;

    //@Override
    public ContentDescriptor[] getSupportedInputContentDescriptors()
    {
        // Return a copy so callers cannot mutate our internal capability table.
        return supportedInputContentDescriptors.clone();
    }

    //@Override
    public Track[] getTracks() throws IOException, BadHeaderException
    {
        return tracks;
    }

    // If we do the open in setSource, then we can identify an incompatible source
    // right away.
    // This is useful because the JMF/FMJ infrastructure (via the Manager) will
    // try a new Demultiplexer in this case. If open fails, the Manager will not
    // retry. This is useful because in the case of .ogg, this parser will only
    // handle audio, not video. We need to use another Demultiplexer if there is
    // video.
    private static final boolean OPEN_IN_SET_SOURCE = true;

    //@Override
    public void setSource(DataSource source) throws IOException, IncompatibleSourceException
    {
        // Only cloneable pull sources are usable: format detection consumes
        // stream data, so a second (cloned) stream is needed for reading.
        if (!(source instanceof PullDataSource))
            throw new IncompatibleSourceException();
        if (!(source instanceof SourceCloneable))
            throw new IncompatibleSourceException();
        this.sourceForFormat = (PullDataSource) source;

        if (OPEN_IN_SET_SOURCE)
        {
            try
            {
                doOpen();
            }
            catch (UnsupportedAudioFileException e)
            {
                logger.log(Level.WARNING, "" + e, e);
                // Report as incompatible so the Manager can try another parser.
                throw new IncompatibleSourceException("" + e);
            }
            catch (IOException e)
            {
                logger.log(Level.WARNING, "" + e, e);
                throw e;
            }
        }
    }

    /**
     * Clones the source, starts both copies, and builds one track per stream:
     * the original streams are used for format detection, the cloned ones for
     * readFrame (see the comment on the fields above).
     */
    private void doOpen() throws IOException, UnsupportedAudioFileException
    {
        sourceForReadFrame = (PullDataSource) ((SourceCloneable) sourceForFormat).createClone();
        // NOTE(review): only the clone is connect()ed here; presumably
        // sourceForFormat was already connected by the caller - confirm.
        sourceForReadFrame.connect();
        sourceForReadFrame.start();

        sourceForFormat.start();

        final PullSourceStream[] streamsForFormat = sourceForFormat.getStreams();
        final PullSourceStream[] streamsForReadFrame = sourceForReadFrame.getStreams();

        tracks = new PullSourceStreamTrack[streamsForFormat.length];
        for (int i = 0; i < streamsForFormat.length; ++i)
        {
            tracks[i] = new PullSourceStreamTrack(streamsForFormat[i], streamsForReadFrame[i]);
        }
    }

    //@Override
    public void open() throws ResourceUnavailableException
    {
        // No-op when the open already happened in setSource.
        if (!OPEN_IN_SET_SOURCE)
        {
            try
            {
                doOpen();
            }
            catch (UnsupportedAudioFileException e)
            {
                logger.log(Level.WARNING, "" + e, e);
                throw new ResourceUnavailableException("" + e);
            }
            catch (IOException e)
            {
                logger.log(Level.WARNING, "" + e, e);
                throw new ResourceUnavailableException("" + e);
            }
        }
    }

    //@Override
    public void start() throws IOException
    {
    }

    // TODO: should we stop data source in stop?
    //
    //
    //  @Override
    //  public void stop()
    //  {
    //      try
    //      {
    //          source.stop();
    //      } catch (IOException e)
    //      {
    //          logger.log(Level.WARNING, "" + e, e);
    //      }
    //  }

    //@Override
    public boolean isPositionable()
    {
        return true;
    }

    //@Override
    public boolean isRandomAccess()
    {
        return super.isRandomAccess(); // TODO: can we determine this from the data source?
    }

    /**
     * Seeks by discarding the current read-side clone, cloning the source
     * again from position zero, and skipping forward to {@code where}.
     * Falls back to the superclass behavior when any track cannot skip by
     * nanoseconds (compressed formats such as mp3/ogg).
     */
    //@Override
    public Time setPosition(Time where, int rounding)
    {
        // TODO: maybe we should just set a variable, and have the readFrame method handle the skip.
        // TODO: what to do about mp3/ogg?
        // TODO: what to do with rounding info?

        // If we can't skip based on nanos, then we can't seek. This happens
        // for mp3/ogg - compressed formats.
        for (int i = 0; i < tracks.length; ++i)
        {
            if (!tracks[i].canSkipNanos())
                return super.setPosition(where, rounding);
        }

        try
        {
            logger.fine("JavaSoundParser: cloning, reconnecting, and restarting source");
            // Just clone the data source again, start at zero, then skip to
            // the position we want.
            sourceForReadFrame = (PullDataSource) ((SourceCloneable) sourceForFormat).createClone();
            sourceForReadFrame.connect();
            sourceForReadFrame.start();
            for (int i = 0; i < tracks.length; ++i)
            {
                tracks[i].setPssForReadFrame(sourceForReadFrame.getStreams()[i]);
                if (where.getNanoseconds() > 0)
                    tracks[i].skipNanos(where.getNanoseconds()); // TODO: check result
            }
            return where; // TODO:
        }
        catch (IOException e)
        {
            logger.log(Level.WARNING, "" + e, e);
            throw new RuntimeException(e); // TODO: how to handle
        }
        catch (UnsupportedAudioFileException e)
        {
            logger.log(Level.WARNING, "" + e, e);
            throw new RuntimeException(e); // TODO: how to handle
        }
    }

    // NOTE(review): this inner class continues beyond this excerpt; the
    // remainder of its body (constructor, readFrame, etc.) is not visible here.
    private class PullSourceStreamTrack extends AbstractTrack
    {
        // TODO: track listener

        private final javax.media.format.AudioFormat format; // format reported to JMF
        private final javax.sound.sampled.AudioFormat javaSoundInputFormat; // format as detected by JavaSound
        private final long frameLength; // length of media in frames

        private PullSourceStream pssForReadFrame;
        private PullSourceStreamInputStream pssisForReadFrame;
        private AudioInputStream aisForReadFrame;
        private long totalBytesRead = 0L; // keep track of total bytes so we can compute current timestamp
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -