📄 microphone.java
字号:
audioStream = new AudioInputStream(audioLine);
if (doConversion) {
    // Wrap the raw line in a converting stream so readers see the
    // desired format rather than the line's native format.
    audioStream = AudioSystem.getAudioInputStream
        (desiredFormat, audioStream);
    assert (audioStream != null);
}

/* Set the frame size depending on the sample rate. */
// bytesPerSample * (msecPerRead worth of samples at the stream rate).
// NOTE(review): channel count is not factored in here — presumably the
// frame size is per-channel; confirm against the stream format used.
float sec = ((float) msecPerRead) / 1000.f;
frameSizeInBytes =
    (audioStream.getFormat().getSampleSizeInBits() / 8) *
    (int) (sec * audioStream.getFormat().getSampleRate());
logger.info("Frame size: " + frameSizeInBytes + " bytes");
}
return true;
} else {
    logger.severe("Can't find microphone");
    return false;
}
}

/**
 * Returns the format of the audio recorded by this Microphone.
 * Note that this might be different from the configured format.
 *
 * @return the current AudioFormat
 */
public AudioFormat getAudioFormat() {
    return finalFormat;
}

/**
 * Returns the current Utterance.
 *
 * @return the current Utterance
 */
public Utterance getUtterance() {
    return currentUtterance;
}

/**
 * Returns true if this Microphone is recording.
 *
 * @return true if this Microphone is recording, false otherwise
 */
public boolean isRecording() {
    return recording;
}

/**
 * Starts recording audio. This method will return only
 * when a START event is received, meaning that this Microphone
 * has started capturing audio.
 *
 * @return true if the recording started successfully; false otherwise
 */
public synchronized boolean startRecording() {
    if (recording) {
        return false;
    }
    if (!open()) {
        return false;
    }
    utteranceEndReached = false;
    if (audioLine.isRunning()) {
        // The line should never still be running when we get here.
        logger.severe("Whoops: audio line is running");
    }
    assert (recorder == null);
    // RecordingThread.start() blocks until the thread has actually
    // begun capturing audio (see waitForStart()).
    recorder = new RecordingThread("Microphone");
    recorder.start();
    recording = true;
    return true;
}

/**
 * Stops recording audio. This method does not return until recording
 * has been stopped and all data has been read from the audio line.
*/
public synchronized void stopRecording() {
    if (audioLine != null) {
        if (recorder != null) {
            // Blocks until the recorder thread has drained the line
            // and signalled completion.
            recorder.stopRecording();
            recorder = null;
        }
        recording = false;
    }
}

/**
 * This Thread records audio, and caches them in an audio buffer.
 */
class RecordingThread extends Thread {

    // Set to true when the capture loop has finished.
    // NOTE(review): written by run() without holding 'lock' but read
    // under 'lock' in stopRecording(); visibility relies on the final
    // notify handshake — confirm this is intentional.
    private boolean done = false;
    // Flipped to true after the first successful read; waitForStart()
    // spins on it (volatile for cross-thread visibility).
    private volatile boolean started = false;
    // Running count of individual samples (not frames) read so far.
    private long totalSamplesRead = 0;
    // Monitor used for the run()/stopRecording() completion handshake.
    private Object lock = new Object();

    /**
     * Creates the thread with the given name
     *
     * @param name the name of the thread
     */
    public RecordingThread(String name) {
        super(name);
    }

    /**
     * Starts the thread, and waits for recorder to be ready
     */
    public void start() {
        started = false;
        super.start();
        waitForStart();
    }

    /**
     * Stops the thread. This method does not return until recording
     * has actually stopped, and all the data has been read from
     * the audio line.
     */
    public void stopRecording() {
        // Stopping the line makes audioStream.read() return <= 0,
        // which ends the capture loop in run().
        audioLine.stop();
        try {
            synchronized (lock) {
                while (!done) {
                    lock.wait();
                }
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Implements the run() method of the Thread class.
     * Records audio, and cache them in the audio buffer.
     */
    public void run() {
        totalSamplesRead = 0;
        logger.info("started recording");

        if (keepDataReference) {
            // Also keep the raw bytes of this session in an Utterance.
            currentUtterance = new Utterance
                ("Microphone", audioStream.getFormat());
        }

        audioList.add(new DataStartSignal());
        logger.info("DataStartSignal added");
        try {
            audioLine.start();
            // Capture until the stream is exhausted (readData returns
            // null once the line has been stopped and drained).
            while (!done) {
                Data data = readData(currentUtterance);
                if (data == null) {
                    done = true;
                    break;
                }
                audioList.add(data);
            }
            audioLine.flush();

            if (closeBetweenUtterances) {
                /* Closing the audio stream *should* (we think)
                 * also close the audio line, but it doesn't
                 * appear to do this on the Mac. In addition,
                 * once the audio line is closed, re-opening it
                 * on the Mac causes some issues. The Java sound
                 * spec is also kind of ambiguous about whether a
                 * closed line can be re-opened. So...we'll go
                 * for the conservative route and never attempt
                 * to re-open a closed line.
*/
                audioStream.close();
                audioLine.close();
                audioLine = null;
            }
        } catch (IOException ioe) {
            logger.warning("IO Exception " + ioe.getMessage());
            ioe.printStackTrace();
        }

        // Duration (msec) of the captured audio, derived from the
        // sample count and the stream's sample rate.
        long duration = (long)
            (((double) totalSamplesRead /
              (double) audioStream.getFormat().getSampleRate()) * 1000.0);

        audioList.add(new DataEndSignal(duration));
        logger.info("DataEndSignal ended");
        logger.info("stopped recording");

        synchronized (lock) {
            // FIX: mark completion before notifying. Previously 'done'
            // was only set inside the capture loop, so if the loop was
            // aborted by the IOException path above, 'done' stayed
            // false and stopRecording()'s wait loop re-checked it and
            // blocked forever after this notify.
            done = true;
            lock.notify();
        }
    }

    /**
     * Waits for the recorder to start
     */
    private synchronized void waitForStart() {
        // note that in theory we could use a LineEvent START
        // to tell us when the microphone is ready, but we have
        // found that some javasound implementations do not always
        // issue this event when a line is opened, so this is a
        // WORKAROUND.
        try {
            while (!started) {
                wait();
            }
        } catch (InterruptedException ie) {
            logger.warning("wait was interrupted");
        }
    }

    /**
     * Reads one frame of audio data, and adds it to the given Utterance.
     *
     * @param utterance the Utterance the raw bytes are appended to
     *                  when keepDataReference is set
     * @return a Data object containing the audio data, or null when
     *         the end of the audio stream has been reached
     * @throws IOException if reading from the audio stream fails
     */
    private Data readData(Utterance utterance) throws IOException {

        // Read the next chunk of data from the TargetDataLine.
byte[] data = new byte[frameSizeInBytes]; int channels = audioStream.getFormat().getChannels(); long collectTime = System.currentTimeMillis(); long firstSampleNumber = totalSamplesRead / channels; int numBytesRead = audioStream.read(data, 0, data.length); // notify the waiters upon start if (!started) { synchronized (this) { started = true; notifyAll(); } } if (logger.isLoggable(Level.FINE)) { logger.info("Read " + numBytesRead + " bytes from audio stream."); } if (numBytesRead <= 0) { return null; } int sampleSizeInBytes = audioStream.getFormat().getSampleSizeInBits() / 8; totalSamplesRead += (numBytesRead / sampleSizeInBytes); if (numBytesRead != frameSizeInBytes) { if (numBytesRead % sampleSizeInBytes != 0) { throw new Error("Incomplete sample read."); } byte[] shrinked = new byte[numBytesRead]; System.arraycopy(data, 0, shrinked, 0, numBytesRead); data = shrinked; } if (keepDataReference) { utterance.add(data); } double[] samples = DataUtil.bytesToValues (data, 0, data.length, sampleSizeInBytes, signed); if (channels > 1) { samples = convertStereoToMono(samples, channels); } return (new DoubleData (samples, (int) audioStream.getFormat().getSampleRate(), collectTime, firstSampleNumber)); } } /** * Converts stereo audio to mono. 
* * @param samples the audio samples, each double in the array is one sample * @param channels the number of channels in the stereo audio */ private double[] convertStereoToMono(double[] samples, int channels) { assert (samples.length % channels == 0); double[] finalSamples = new double[samples.length/channels]; if (stereoToMono.equals("average")) { for (int i = 0, j = 0; i < samples.length; j++) { double sum = samples[i++]; for (int c = 1; c < channels; c++) { sum += samples[i++]; } finalSamples[j] = sum / channels; } } else if (stereoToMono.equals("selectChannel")) { for (int i = selectedChannel, j = 0; i < samples.length; i += channels, j++) { finalSamples[j] = samples[i]; } } else { throw new Error("Unsupported stereo to mono conversion: " + stereoToMono); } return finalSamples; } /** * Clears all cached audio data. */ public void clear() { audioList = new DataList(); } /** * Reads and returns the next Data object from this * Microphone, return null if there is no more audio data. * All audio data captured in-between <code>startRecording()</code> * and <code>stopRecording()</code> is cached in an Utterance * object. Calling this method basically returns the next * chunk of audio data cached in this Utterance. * * @return the next Data or <code>null</code> if none is * available * * @throws DataProcessingException if there is a data processing error */ public Data getData() throws DataProcessingException { getTimer().start(); Data output = null; if (!utteranceEndReached) { output = (Data) audioList.remove(); if (output instanceof DataEndSignal) { utteranceEndReached = true; } } getTimer().stop(); // signalCheck(output); return output; } /** * Returns true if there is more data in the Microphone. * This happens either if getRecording() return true, or if the * buffer in the Microphone has a size larger than zero. 
*
 * @return true if there is more data in the Microphone
 */
public boolean hasMoreData() {
    boolean moreData;
    // Take the queue's monitor so the size check is consistent with
    // DataList's synchronized add()/remove().
    synchronized (audioList) {
        moreData = (!utteranceEndReached || audioList.size() > 0);
    }
    return moreData;
}
}

/**
 * Manages the data as a FIFO queue
 */
class DataList {

    // Underlying FIFO storage; guarded by this DataList's monitor.
    private List list;

    /**
     * Creates a new data list
     */
    public DataList() {
        list = new LinkedList();
    }

    /**
     * Adds a data to the queue and wakes one waiting consumer.
     *
     * @param data the data to add
     */
    public synchronized void add(Data data) {
        list.add(data);
        notify();
    }

    /**
     * Returns the current size of the queue
     *
     * @return the size of the queue
     */
    public synchronized int size() {
        return list.size();
    }

    /**
     * Removes and returns the oldest item on the queue, blocking
     * until an item is available.
     *
     * @return the oldest item, or null if the waiting thread was
     *         interrupted before an item became available
     */
    public synchronized Data remove() {
        while (list.size() == 0) {
            try {
                wait();
            } catch (InterruptedException ie) {
                // FIX: the old code caught this outside the loop and
                // fell through to list.remove(0), which throws
                // IndexOutOfBoundsException on an empty list; it also
                // discarded the interrupt. Restore the interrupt
                // status and report "no data" instead.
                Thread.currentThread().interrupt();
                return null;
            }
        }
        Data data = (Data) list.remove(0);
        if (data == null) {
            System.out.println("DataList is returning null.");
        }
        return data;
    }
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -