📄 basicplayer.java
字号:
m_audioFileFormat = AudioSystem.getAudioFileFormat(url); } /** * Inits Audio ressources from InputStream. */ protected void initAudioInputStream(InputStream inputStream) throws UnsupportedAudioFileException, IOException { m_audioInputStream = AudioSystem.getAudioInputStream(inputStream); m_audioFileFormat = AudioSystem.getAudioFileFormat(inputStream); } /** * Inits Audio ressources from AudioSystem.<br> */ protected void initLine() throws LineUnavailableException { log.info("initLine()"); if (m_line == null) { createLine(); } if (!m_line.isOpen()) { openLine(); } else { AudioFormat lineAudioFormat = m_line.getFormat(); AudioFormat audioInputStreamFormat = m_audioInputStream == null ? null : m_audioInputStream.getFormat(); if (!lineAudioFormat.equals(audioInputStreamFormat)) { m_line.close(); openLine(); } } } /** * Inits a DateLine.<br> * * We check if the line supports Gain and Pan controls. * * From the AudioInputStream, i.e. from the sound file, we * fetch information about the format of the audio data. These * information include the sampling frequency, the number of * channels and the size of the samples. There information * are needed to ask JavaSound for a suitable output line * for this audio file. * Furthermore, we have to give JavaSound a hint about how * big the internal buffer for the line should be. Here, * we say AudioSystem.NOT_SPECIFIED, signaling that we don't * care about the exact size. JavaSound will use some default * value for the buffer size. 
*/
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        // Normalize the sample size: unknown (<= 0), ULAW/ALAW, and anything
        // other than 8-bit are all decoded to 16-bit signed PCM below.
        int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
        if (nSampleSizeInBits <= 0) {
            nSampleSizeInBits = 16;
        }
        if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW) || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
            nSampleSizeInBits = 16;
        }
        if (nSampleSizeInBits != 8) {
            nSampleSizeInBits = 16;
        }
        // Target PCM_SIGNED format with little-endian byte order (false) and a
        // frame size of channels * bytes-per-sample.
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(), sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on the encoded stream for progress notification.
        // This must happen BEFORE m_audioInputStream is replaced by the
        // decoded stream below.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            // NOTE(review): available() is only an estimate per the
            // InputStream contract — may under-report the true length.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream (replaces the field with the PCM view).
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        // -1 == AudioSystem.NOT_SPECIFIED: let JavaSound choose the buffer size.
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
        // Use the explicitly selected mixer when available, otherwise fall
        // back to the system default and clear the stale mixer name.
        Mixer mixer = getMixer(m_mixerName);
        if (mixer != null) {
            log.info("Mixer : " + mixer.getMixerInfo().toString());
            m_line = (SourceDataLine) mixer.getLine(info);
        } else {
            m_line = (SourceDataLine) AudioSystem.getLine(info);
            m_mixerName = null;
        }
        log.info("Line : " + m_line.toString());
        log.info("Line Info : " + m_line.getLineInfo().toString());
        log.info("Line AudioFormat: " + m_line.getFormat().toString());
    }
}

/**
 * Opens the line.
*/ protected void openLine() throws LineUnavailableException { if (m_line != null) { AudioFormat audioFormat = m_audioInputStream.getFormat(); int buffersize = lineBufferSize; if (buffersize <= 0) { buffersize = m_line.getBufferSize(); } m_lineCurrentBufferSize = buffersize; m_line.open(audioFormat, buffersize); log.info("Open Line : BufferSize=" + buffersize); /*-- Display supported controls --*/ Control[] c = m_line.getControls(); for (int p = 0; p < c.length; p++) { log.info("Controls : " + c[p].toString()); } /*-- Is Gain Control supported ? --*/ if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) { m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN); log.info("Master Gain Control : [" + m_gainControl.getMinimum() + "," + m_gainControl.getMaximum() + "] " + m_gainControl.getPrecision()); } /*-- Is Pan control supported ? --*/ if (m_line.isControlSupported(FloatControl.Type.PAN)) { m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN); log.info("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "] " + m_panControl.getPrecision()); } } } /** * Stops the playback.<br> * * Player Status = STOPPED.<br> * Thread should free Audio ressources. */ protected void stopPlayback() { if ((m_status == PLAYING) || (m_status == PAUSED)) { if (m_line != null) { m_line.flush(); m_line.stop(); } m_status = STOPPED; synchronized (m_audioInputStream) { m_audioInputStream.notifyAll(); } notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null); synchronized (m_audioInputStream) { closeStream(); } log.info("stopPlayback() completed"); } } /** * Pauses the playback.<br> * * Player Status = PAUSED. 
*/
protected void pausePlayback() {
    if (m_line != null) {
        if (m_status == PLAYING) {// m_line.flush();
            // Line is stopped but NOT flushed, so buffered audio survives a
            // pause/resume cycle (the flush() above was deliberately disabled).
            m_line.stop();
            m_status = PAUSED;
            log.info("pausePlayback() completed");
            notifyEvent(BasicPlayerEvent.PAUSED, getEncodedStreamPosition(), -1, null);
        }
    }
}

/**
 * Resumes the playback.<br>
 *
 * Player Status = PLAYING.
 */
protected void resumePlayback() {
    if (m_line != null) {
        if (m_status == PAUSED) {
            m_line.start();
            m_status = PLAYING;
            // Wake the playback thread waiting in run() on the stream lock.
            synchronized (m_audioInputStream) {
                m_audioInputStream.notifyAll();
            }
            log.info("resumePlayback() completed");
            notifyEvent(BasicPlayerEvent.RESUMED, getEncodedStreamPosition(), -1, null);
        }
    }
}

/**
 * Starts playback.
 *
 * Re-inits the stream when STOPPED, waits for any previous playback thread
 * to die (interrupting it after ~3s), opens the line, then spawns the
 * playback thread and flips the status to PLAYING.
 */
protected void startPlayback() throws BasicPlayerException {
    if (m_status == STOPPED) {
        initAudioInputStream();
    }
    if (m_status == OPENED) {
        log.info("startPlayback called");
        if (!(m_thread == null || !m_thread.isAlive())) {
            log.warning("WARNING: old thread still running!!");
            int cnt = 0;
            // Poll once per second for the old thread to release the player
            // (i.e. status returning to OPENED); interrupt it after 3 tries.
            while (m_status != OPENED) {
                try {
                    if (m_thread != null) {
                        log.info("Waiting ... " + cnt);
                        cnt++;
                        Thread.sleep(1000);
                        if (cnt > 2) {
                            m_thread.interrupt();
                        }
                    }
                } catch (InterruptedException e) {
                    throw new BasicPlayerException(BasicPlayerException.WAITERROR, e);
                }
            }
        }
        // Open SourceDataLine.
        try {
            initLine();
        } catch (LineUnavailableException e) {
            throw new BasicPlayerException(BasicPlayerException.CANNOTINITLINE, e);
        }
        log.info("Creating new thread");
        // The thread is started first; run() blocks until m_status becomes
        // PLAYING and notifyAll() below wakes it.
        m_thread = new Thread(this, "BasicPlayer");
        m_thread.start();
        if (m_line != null) {
            m_line.start();
            m_status = PLAYING;
            synchronized (m_audioInputStream) {
                m_audioInputStream.notifyAll();
            }
            notifyEvent(BasicPlayerEvent.PLAYING, getEncodedStreamPosition(), -1, null);
        }
    }
}

/**
 * Main loop.
 *
 * Player Status == STOPPED || SEEKING => End of Thread + Freeing Audio Resources.<br>
 * Player Status == PLAYING => Audio stream data sent to Audio line.<br>
 * Player Status == PAUSED => Waiting for another status.
*/
public void run() {
    log.info("Thread Running");
    int nBytesRead = 1;
    byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    int readIndex = 0;  // total bytes read into the buffer (appears unused)
    int writeIndex = 0; // total bytes written out (appears unused)
    // Lock stream while playing.
    synchronized (m_audioInputStream) {
        boolean buffering = false;
        // Main play/pause loop: exits on end-of-stream (read == -1) or when
        // another thread moves the status to STOPPED/SEEKING/UNKNOWN.
        while ((nBytesRead != -1) && (m_status != STOPPED) && (m_status != SEEKING) && (m_status != UNKNOWN)) {
            if (m_status == PLAYING) {
                // Play.
                try {
                    nBytesRead = m_audioInputStream.read(abData, 0, abData.length);
                    if (nBytesRead >= 0) {
                        // Copy the chunk so listeners get a stable snapshot.
                        byte[] pcm = new byte[nBytesRead];
                        System.arraycopy(abData, 0, pcm, 0, nBytesRead);
                        // NOTE(review): the buffering throttle is disabled —
                        // `buffering = true` is commented out below, so
                        // `buffering` stays false and the write always runs;
                        // this branch only logs a buffer-underrun message.
                        // Confirm that disabling it was intentional.
                        if (m_line.available() >= m_line.getBufferSize()) {// buffering = true;
                            log.info("缓冲区空虚 : " + m_line.available() + "/" + m_line.getBufferSize());
                        }
                        // if(m_line.available()==0){
                        // buffering=false;
                        // }
                        if (buffering == false) {
                            // Blocking write of the decoded PCM to the line.
                            int nBytesWritten = m_line.write(abData, 0, nBytesRead);
                            // Compute position in bytes in encoded stream.
                            int nEncodedBytes = getEncodedStreamPosition();
                            // Notify listeners of playback progress.
                            Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
                            while (it.hasNext()) {
                                BasicPlayerListener bpl = it.next();
                                if (m_audioInputStream instanceof PropertiesContainer) {
                                    // Pass audio parameters such as instant bitrate, ...
                                    Map properties = ((PropertiesContainer) m_audioInputStream).properties();
                                    bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, properties);
                                } else {
                                    bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, empty_map);
                                }
                            }
                        }
                    }
                } catch (IOException e) {
                    log.log(Level.SEVERE, "Thread cannot run()", e);
                    m_status = STOPPED;
                    notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
                }
                // Nice CPU usage.
                if (threadSleep > 0) {
                    try {
                        Thread.sleep(threadSleep);
                    } catch (InterruptedException e) {
                        log.log(Level.SEVERE, "Thread cannot sleep(" + threadSleep + ")", e);
                    }
                }
            } else {
                // Not PLAYING: block until pause/stop/resume calls notifyAll()
                // on the stream lock (wait() releases the reentrant monitor
                // acquired by the outer synchronized block).
                synchronized (m_audioInputStream) {
                    try {
                        log.log(Level.INFO, "状态是不正在播放,要无限期的等待了.....");
                        m_audioInputStream.wait();
                        log.log(Level.INFO, "状态改过来了,等待被唤醒了.......");
                    } catch (InterruptedException ex) {
                        Logger.getLogger(BasicPlayer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
                // Pause
                // try {
                //     Thread.sleep(500);
                // } catch (InterruptedException e) {
                //     log.log(Level.SEVERE, "Thread cannot sleep(500)", e);
                // }
            }
        }
        // Free audio resources: drain the remaining buffered audio, then
        // stop and close the line.
        if (m_line != null) {
            m_line.drain();
            m_line.stop();
            m_line.close();
            // NOTE(review): method continues beyond this chunk — remainder not visible here.
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -