📄 handler.java
字号:
package net.sf.fmj.media.content.unknown;

import java.awt.Component;
import java.awt.Rectangle;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.media.BadHeaderException;
import javax.media.Buffer;
import javax.media.Clock;
import javax.media.ClockStoppedException;
import javax.media.Codec;
import javax.media.Demultiplexer;
import javax.media.Format;
import javax.media.GainControl;
import javax.media.IncompatibleSourceException;
import javax.media.IncompatibleTimeBaseException;
import javax.media.InternalErrorEvent;
import javax.media.Multiplexer;
import javax.media.NotConfiguredError;
import javax.media.NotRealizedError;
import javax.media.Renderer;
import javax.media.ResourceUnavailableException;
import javax.media.Time;
import javax.media.TimeBase;
import javax.media.Track;
import javax.media.UnsupportedPlugInException;
import javax.media.control.TrackControl;
import javax.media.format.AudioFormat;
import javax.media.format.VideoFormat;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.renderer.VideoRenderer;

import net.sf.fmj.ejmf.toolkit.gui.controlpanel.StandardControlPanel;
import net.sf.fmj.filtergraph.DemuxNode;
import net.sf.fmj.filtergraph.FilterGraph;
import net.sf.fmj.filtergraph.FilterGraphLink;
import net.sf.fmj.filtergraph.FilterGraphNode;
import net.sf.fmj.filtergraph.MuxNode;
import net.sf.fmj.filtergraph.RendererNode;
import net.sf.fmj.media.AbstractProcessor;
import net.sf.fmj.utility.LoggerSingleton;

import com.lti.utils.synchronization.CloseableThread;

/**
 * The main handler for media. Builds a playback filter graph and starts threads to process it.
 *
 * @author Ken Larson
 */
public class Handler extends AbstractProcessor
{
    // This handler can operate either as a Player or as a Processor; the mode
    // is fixed at construction time and selects which kind of filter graph is
    // built in doPlayerRealize (graph-to-renderer vs. graph-to-mux).
    protected static final int PLAYER = 1;
    protected static final int PROCESSOR = 2;

    /** Either PLAYER or PROCESSOR; set once by the constructor. */
    private final int mode;

    // TODO: AbstractPlayer handles multiple controllers, so what we need to do is create a Controller for each track.
    private static final Logger logger = LoggerSingleton.logger;

    // One-shot guard: cleared the first time doPlayerPrefetch runs.
    private boolean prefetchNeeded = true;
    private Time duration;
    /** Visual component obtained from the video renderer during realize. */
    private Component visualComponent;
    // NOTE(review): TrackThread is not imported above — presumably declared
    // elsewhere in this file/package; not visible in this chunk.
    private TrackThread[] trackThreads;
    private static final boolean TRACE = true;

    /** Root (demux node) of the filter graph built in doPlayerRealize. */
    private DemuxNode root;
    /** Demultiplexer chosen for the data source in setSource. */
    private Demultiplexer demux;
    private int numTracks;
    private Track[] tracks;
    private Multiplexer mux; // only for processor
    private Format[] muxInputFormats; // only for processor
    /** Duration reported by the demux when the source was set; DURATION_UNKNOWN until then. */
    private Time demuxDuration = DURATION_UNKNOWN;

    /** Constructs a handler in PLAYER mode. */
    public Handler()
    {
        this(PLAYER);
    }

    /**
     * Constructs a handler in the given mode.
     *
     * @param mode either PLAYER or PROCESSOR.
     */
    public Handler(final int mode)
    {
        super();
        this.mode = mode;
    }

    /**
     * Finds a source-compatible demultiplexer for the data source, caches the
     * duration it reports, and records the source via the superclass.
     *
     * @throws IncompatibleSourceException if no registered demultiplexer
     *         accepts the source.
     */
    public void setSource(DataSource source) throws IncompatibleSourceException
    {
        // setSource and getDuration on a demux at this stage.
        if (TRACE)
            logger.fine("DataSource: " + source);
        // TODO: The original FMJ code would build the entire filter graph here.
        // this is not what JMF does, JMF builds the filter graph in realize.
        // The advantage of the old way is that if there were multiple demuxes that
        // matched, it would try them all. Now, it finds the first demux that it can
        // successfully set the source on, and uses that. The graph is then built in
        // realize, and if that fails, the architecture will not try any other demux's for
        // this handler.
        // This causes problems where there are demultiplexers that fail to realize.
        // an example is com.ibm.media.parser.video.MpegParser. The immediate solution
        // was to de-register that parser for mpeg audio.
        demux = FilterGraph.getSourceCompatibleDemultiplexer(source);
        if (demux == null)
            throw new IncompatibleSourceException("Unable to build filter graph for: " + source);
        demuxDuration = demux.getDuration(); // JMF calls this at this stage, so we might as well too, and use it in getDuration().
        super.setSource(source);
    }

    /**
     * Lazily opens/starts the demux and caches its track array in
     * {@link #tracks}/{@link #numTracks}.
     * can only be called after demux is open and started.
     *
     * @return true if the tracks are cached (or already were); false if the
     *         demux could not be opened/started or getTracks failed.
     */
    private boolean getDemuxTracks()
    {
        if (tracks != null)
            return true; // already cached
        try
        {
            if (!openAndStartDemux())
                return false;
            tracks = demux.getTracks();
            numTracks = tracks.length;
        } catch (BadHeaderException e)
        {
            logger.log(Level.WARNING, "" + e, e);
            return false;
        } catch (IOException e)
        {
            logger.log(Level.WARNING, "" + e, e);
            return false;
        } catch (Exception e)
        {
            logger.log(Level.WARNING, "" + e, e);
            return false;
        }
        return true;
    }

    /** True once demux.open() and demux.start() have both succeeded. */
    private boolean demuxOpenedAndStarted;

    /**
     * Opens and starts the demux (idempotent). On any exception the demux is
     * closed (best-effort) and false is returned.
     *
     * @return true if the demux is open and started.
     */
    private boolean openAndStartDemux()
    {
        if (demuxOpenedAndStarted)
            return true;
        try
        {
            // we need to open the demux before we can get the tracks.
            // we only need the tracks if we are a processor, so perhaps this should be deferred.
            // this could be done in buildMux, and in doRealize (only needs to be done once)
            demux.open();
            // TODO: should this happen here or in realize? For some demultiplexers, getTracks returns null if it is not open. For example, com.sun.media.parser.RawPullBufferParser (project jipcam)
            demux.start();
            demuxOpenedAndStarted = true;
            return true;
        } catch (Exception e)
        {
            logger.log(Level.WARNING, "" + e, e);
            // TODO: not sure if JMF closes the demux in this case.
            // is it the demux's responsibility to clean up in the event of an exception? or ours?
            try
            {
                closeDemux();
            } catch (Throwable t)
            {
                logger.log(Level.WARNING, "" + t, t);
            }
            return false;
        }
    }

    /** Closes the demux (if any) and resets the opened/started flag. */
    private void closeDemux()
    {
        if (demux != null)
            demux.close();
        demuxOpenedAndStarted = false;
    }

    //@Override
    public void doPlayerClose()
    {
        closeDemux(); // TODO
        logger.info("Handler.doPlayerClose");
    }

    //@Override
    public boolean doPlayerDeallocate()
    {
        logger.info("Handler.doPlayerDeallocate");
        return true;
    }

    //@Override
    public boolean doPlayerPrefetch()
    {
        // One-shot: only the first call does (or rather, would do) any work.
        if (!prefetchNeeded)
            return true;
        prefetchNeeded = false;
        return true;
    }

    /**
     * @return the index of the first track of {@link #root} whose format is a
     *         VideoFormat, or -1 if there is none.
     */
    private int getVideoTrackIndex()
    {
        int trackIndex = -1;
        for (int i = 0; i < root.getTracks().length; ++i)
        {
            if (root.getTracks()[i].getFormat() instanceof VideoFormat)
            {
                trackIndex = i;
                break;
            }
        }
        return trackIndex;
    }

    /**
     * @return the index of the first track of {@link #root} whose format is an
     *         AudioFormat, or -1 if there is none.
     */
    private int getAudioTrackIndex()
    {
        int trackIndex = -1;
        for (int i = 0; i < root.getTracks().length; ++i)
        {
            if (root.getTracks()[i].getFormat() instanceof AudioFormat)
            {
                trackIndex = i;
                break;
            }
        }
        return trackIndex;
    }

    /**
     * Realizes the player/processor: opens and starts the demux, builds the
     * filter graph (to a renderer in PLAYER mode, to a mux in PROCESSOR mode
     * via buildMux — defined elsewhere in this class, not visible in this
     * chunk), sizes the video renderer's component, wires the audio
     * renderer's time base when it is a Clock, and opens the downstream
     * graph nodes. Posts a controller error event and returns false on
     * failure.
     */
    //@Override
    public boolean doPlayerRealize()
    {
        // TODO: in the event of errors, post more specific error events, such as ResourceUnavailableEvent.
        try
        {
            if (!openAndStartDemux())
            {
                postControllerErrorEvent("Failed to openAndStartDemux");
                // TODO: we could get a more specific error, like resource not available, from the function.
                return false;
            }
            if (mode == PLAYER)
            {
                root = FilterGraph.buildGraphToRenderer(new ContentDescriptor(getSource().getContentType()), demux);
            }
            else
            {
                buildMux();
                int muxTrack = 0; // TODO: hard-coded to track 0
                root = FilterGraph.buildGraphToMux(new ContentDescriptor(getSource().getContentType()), demux, mux, muxInputFormats[muxTrack], muxTrack);
            }
        } catch (Exception e)
        {
            logger.log(Level.WARNING, "" + e, e);
            closeDemux();
            postControllerErrorEvent("" + e);
            return false;
        }
        if (root == null)
        {
            logger.fine("unable to find a filter graph to connect from demux to renderer/mux"); // TODO: give details
            closeDemux();
            // TODO: base class says we should post these events.
            // TODO: do this everywhere in FMJ in a controller
            // where we fail to realize.
            postControllerErrorEvent("unable to find a filter graph to connect from demux to renderer/mux");
            return false;
        }
        if (TRACE)
        {
            logger.fine("Filter graph:");
            FilterGraph.print(root, 1);
        }

        final int videoTrackIndex = getVideoTrackIndex();
        if (mode == PLAYER && videoTrackIndex >= 0) // if it has a video track
        {
            final RendererNode rendererNode = (RendererNode) FilterGraph.getTail(root.getDestLink(videoTrackIndex).getDestNode());
            if (rendererNode != null)
            {
                final VideoRenderer videoRenderer = (VideoRenderer) rendererNode.getRenderer();
                final VideoFormat videoRendererInputFormat = (VideoFormat) rendererNode.getInputFormat();
                // TODO: we need to start the demux
                visualComponent = videoRenderer.getComponent();
                visualComponent.setSize(videoRendererInputFormat.getSize());
                //logger.fine("Video size: " + videoRendererInputFormat.getSize());
                videoRenderer.setBounds(new Rectangle(videoRendererInputFormat.getSize()));
            }
        }

        // TODO:
        // Sun's AudioRenderer implements Prefetchable, Drainable, Clock
        // This causes their handler to call some extra methods during initialization.
        // here we have a somewhat hard-coded attempt to recreate this.
        // For one, if it is Prefetchable, then we keep passing it buffers while isPrefetching is true,
        // then call syncStart (which is a Clock method)
        // TODO: determine which of these is to be called in our prefetch, realize, start, etc.
        final int audioTrackIndex = getAudioTrackIndex();
        if (mode == PLAYER && audioTrackIndex >= 0) // if it has a audio track
        {
            final RendererNode rendererNode = (RendererNode) FilterGraph.getTail(root.getDestLink(audioTrackIndex).getDestNode());
            if (rendererNode != null)
            {
                final Renderer renderer = rendererNode.getRenderer();
                if (renderer instanceof Clock)
                {
                    final Clock rendererAsClock = (Clock) renderer;
                    try
                    {
                        TimeBase timeBase = rendererAsClock.getTimeBase();
                        // With JMF, this ends up as a com.sun.media.renderer.audio.AudioRenderer$AudioTimeBase@49bdc9d8
                        // TODO: what do we do in between getting and setting?
                        // probably what we need to do is somehow use this clock as our clock.
                        // TODO: this is starting to make sense to me. An audio renderer differs from a video renderer in that
                        // the audio renderer has to determine time, therefore it is the master clock. The video has to be synched with
                        // the audio, not the other way around.
                        rendererAsClock.setTimeBase(timeBase); // this seems unnecessary, but it does cause the audio renderer to set its master clock.
                    } catch (IncompatibleTimeBaseException e)
                    {
                        logger.log(Level.WARNING, "" + e, e);
                        postControllerErrorEvent("" + e);
                        return false;
                    }
                }
            }
        }
        try
        {
            // root was already opened in setDataSource.
            // TODO: JMF calls open on the parser during realize
            for (int i = 0; i < root.getNumDestLinks(); ++i)
            {
                final FilterGraphLink link = root.getDestLink(i);
                if (link != null)
                    FilterGraph.open(link.getDestNode());
            }
        } catch (ResourceUnavailableException e)
        // NOTE(review): SOURCE is truncated at this point — the body of this
        // catch clause, the remainder of doPlayerRealize, and the rest of the
        // Handler class (including buildMux and TrackThread) are not visible
        // in this chunk.
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -