be.tarsos.dsp.AudioProcessor Java Examples
The following examples show how to use be.tarsos.dsp.AudioProcessor.
The source file, originating project, and license are noted above each example.
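All of the project examples below share the same pattern: implement the two methods of be.tarsos.dsp.AudioProcessor, register the processor on an AudioDispatcher, and run the dispatcher. Here is a minimal sketch of that pattern, assuming the JVM factory in be.tarsos.dsp.io.jvm (fromPipe decodes through an external decoder, as in the Panako examples); the input path, buffer settings, and the use of getRMS() are illustrative choices, not taken from the projects below.

import be.tarsos.dsp.AudioDispatcher;
import be.tarsos.dsp.AudioEvent;
import be.tarsos.dsp.AudioProcessor;
import be.tarsos.dsp.io.jvm.AudioDispatcherFactory;

public class RmsPrinter {
    public static void main(String[] args) {
        // Placeholder input and buffer settings; adjust for your own audio.
        int samplerate = 44100;
        int size = 1024;
        int overlap = 0;
        AudioDispatcher d = AudioDispatcherFactory.fromPipe("audio.wav", samplerate, size, overlap);
        d.addAudioProcessor(new AudioProcessor() {
            @Override
            public boolean process(AudioEvent audioEvent) {
                // Called once per buffer; return false to break the processing chain.
                System.out.printf("%.3f s  RMS %.4f%n", audioEvent.getTimeStamp(), audioEvent.getRMS());
                return true;
            }

            @Override
            public void processingFinished() {
                // Called once, after the last buffer has been processed.
                System.out.println("Done.");
            }
        });
        d.run();
    }
}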
Example #1
Source File: NCteQStrategy.java, from Panako, GNU Affero General Public License v3.0
@Override
public void monitor(String query, final int maxNumberOfReqults, Set<Integer> avoid, final QueryResultHandler handler) {
    int samplerate = Config.getInt(Key.NCTEQ_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    final ConstantQ constanQ = createConstantQ();
    AudioDispatcher d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfReqults, handler, timeStamp, constanQ);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
Example #2
Source File: QIFFTAudioFileInfo.java, from Panako, GNU Affero General Public License v3.0
public void extractInfoFromAudio(final Component componentToRepaint) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.NFFT_SIZE);
    int overlap = size - Config.getInt(Key.NFFT_STEP_SIZE);
    StopWatch w = new StopWatch();
    w.start();
    d = AudioDispatcherFactory.fromPipe(audioFile.getAbsolutePath(), samplerate, size, overlap);
    eventPointProcessor = new QIFFTEventPointProcessor(size, overlap, samplerate, 4);
    d.addAudioProcessor(eventPointProcessor);
    d.addAudioProcessor(this);
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public void processingFinished() {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    componentToRepaint.repaint();
                }
            });
            if (referenceFileInfo != null)
                referenceFileInfo.setMatchingFingerprints(matchingPrints);
        }

        @Override
        public boolean process(AudioEvent audioEvent) {
            return true;
        }
    });
    new Thread(d).start();
}
Example #3
Source File: NFFTAudioFileInfo.java, from Panako, GNU Affero General Public License v3.0
public void extractInfoFromAudio(final Component componentToRepaint) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.NFFT_SIZE);
    int overlap = size - Config.getInt(Key.NFFT_STEP_SIZE);
    StopWatch w = new StopWatch();
    w.start();
    d = AudioDispatcherFactory.fromPipe(audioFile.getAbsolutePath(), samplerate, size, overlap);
    eventPointProcessor = new NFFTEventPointProcessor(size, overlap, samplerate);
    d.addAudioProcessor(eventPointProcessor);
    d.addAudioProcessor(this);
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public void processingFinished() {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    componentToRepaint.repaint();
                }
            });
            if (referenceAudioFileInfo != null)
                referenceAudioFileInfo.setMatchingFingerprints(matchingPrints);
        }

        @Override
        public boolean process(AudioEvent audioEvent) {
            return true;
        }
    });
    new Thread(d).start();
}
Example #4
Source File: RafsRepStrategy.java, from Panako, GNU Affero General Public License v3.0
@Override
public void monitor(String query, int maxNumberOfReqults, Set<Integer> avoid, QueryResultHandler handler) {
    int samplerate = Config.getInt(Key.RAFS_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            d = null;
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.setZeroPadFirstBuffer(true);
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQuery(audioEvent.getFloatBuffer().clone(), handler, timeStamp, avoid);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
Example #5
Source File: NFFTStrategy.java, from Panako, GNU Affero General Public License v3.0
public void monitor(String query, final SerializedFingerprintsHandler handler) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            d = null;
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQueryToSerializeFingerprints(audioEvent.getFloatBuffer().clone(), handler, timeStamp);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
Example #6
Source File: NFFTStrategy.java, from Panako, GNU Affero General Public License v3.0
@Override
public void monitor(String query, final int maxNumberOfResults, Set<Integer> avoid, final QueryResultHandler handler) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            d = null;
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfResults, handler, timeStamp, avoid);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
Example #7
Source File: SyncSinkTests.java, from Panako, GNU Affero General Public License v3.0
@Test
public void testPipeDecoder() {
    File reference = TestUtilities.getResource("dataset/61198.wav");
    File referenceFile = TestUtilities.getResource("dataset/61198.wav");
    final float[] referenceBuffer = TestUtilities.getAudioBuffer(reference, 1.0, 1.5);
    AudioDispatcher d = AudioDispatcherFactory.fromPipe(referenceFile.getAbsolutePath(), 44100, 22050, 0, 1.0, 0.5);
    d.addAudioProcessor(new AudioProcessor() {
        boolean ran = false;

        @Override
        public void processingFinished() {
        }

        @Override
        public boolean process(AudioEvent audioEvent) {
            if (!ran) {
                float[] otherBuffer = audioEvent.getFloatBuffer();
                assertEquals("Buffers should be equal in length", referenceBuffer.length, otherBuffer.length);
                for (int i = 0; i < otherBuffer.length; i++) {
                    assertEquals("Buffers should have the same content", referenceBuffer[i], otherBuffer[i], 0.0000001);
                }
            }
            ran = true;
            return true;
        }
    });
    d.run();
}
Example #8
Source File: TestUtilities.java, from Panako, GNU Affero General Public License v3.0
public static float[] getAudioBuffer(File file, double start, double stop) {
    double sampleRate = 44100;
    int sampleStart = (int) Math.round(sampleRate * start);
    int sampleStop = (int) Math.round(sampleRate * stop);
    int diff = sampleStop - sampleStart;
    final float[] audioBuffer = new float[diff];
    AudioDispatcher d;
    d = AudioDispatcherFactory.fromPipe(file.getAbsolutePath(), 44100, diff, 0);
    d.skip(start);
    d.addAudioProcessor(new AudioProcessor() {
        boolean filled = false;

        @Override
        public void processingFinished() {
        }

        @Override
        public boolean process(AudioEvent audioEvent) {
            if (!filled) {
                for (int i = 0; i < audioEvent.getFloatBuffer().length; i++) {
                    audioBuffer[i] = audioEvent.getFloatBuffer()[i];
                }
                filled = true;
            }
            return false;
        }
    });
    d.run();
    return audioBuffer;
}
Example #9
Source File: RecordingMfccService.java, from android-speaker-audioanalysis, MIT License
public void startMfccExtraction() {
    // MFCC(samplesPerFrame, sampleRate)
    // Typical samplesPerFrame values are powers of two; samples per frame = (sample rate) / FPS.
    // Florian suggested using 16 kHz as the sample rate and 512 as the frame size.
    final MFCC mfccObj = new MFCC(samplesPerFrame, sampleRate, amountOfCepstrumCoef, amountOfMelFilters, lowerFilterFreq, upperFilterFreq); // (1024, 22050);

    /* AudioProcessors are responsible for the actual digital signal processing.
       AudioProcessors are meant to be chained, e.g. execute an effect and then play the sound.
       The chain of audio processors can be interrupted by returning false from the process methods. */
    dispatcher.addAudioProcessor(mfccObj);
    // handlePitchDetection();
    dispatcher.addAudioProcessor(new AudioProcessor() {
        @Override
        public void processingFinished() {
            // Notified that no more data is available and processing has finished.
        }

        @Override
        public boolean process(AudioEvent audioEvent) {
            // Process the audio event: do the actual signal processing on an (optionally) overlapping buffer.
            // Fetch the MFCC array and drop index 0, because it is the energy coefficient (discarded as Florian asked).
            float[] mfccOutput = mfccObj.getMFCC();
            mfccOutput = Arrays.copyOfRange(mfccOutput, 1, mfccOutput.length);
            // Store in a global list so it can easily be transformed into CSV later.
            mfccList.add(mfccOutput);
            Log.i("MFCC", String.valueOf(Arrays.toString(mfccOutput)));
            return true;
        }
    });
    // A Thread is preferable to an AsyncTask here, see: http://stackoverflow.com/a/18480297/1016544
    new Thread(dispatcher, "Audio Dispatcher").start();
}
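Example #9 relies on fields (dispatcher, samplesPerFrame, sampleRate, the MFCC filter settings, mfccList) that are initialized elsewhere in the service and are not shown. Below is a minimal sketch of what that setup might look like, assuming TarsosDSP's Android factory in be.tarsos.dsp.io.android and the 16 kHz / 512-sample values suggested in the comments; the class name and every field value are illustrative, not taken from the project.

import java.util.ArrayList;
import java.util.List;

import be.tarsos.dsp.AudioDispatcher;
import be.tarsos.dsp.io.android.AudioDispatcherFactory;

// Hypothetical setup for the fields used by startMfccExtraction() above.
public class MfccSetupSketch {
    // 16 kHz sample rate and 512 samples per frame, as suggested in the comments above.
    final int sampleRate = 16000;
    final int samplesPerFrame = 512;
    final int bufferOverlap = 0;

    // Illustrative MFCC filter-bank settings (not taken from the original project).
    final int amountOfCepstrumCoef = 13;   // index 0 is the energy coefficient, discarded above
    final int amountOfMelFilters = 30;
    final float lowerFilterFreq = 133.3334f;
    final float upperFilterFreq = sampleRate / 2f;

    // Collected MFCC frames, later written out as CSV.
    final List<float[]> mfccList = new ArrayList<float[]>();

    // TarsosDSP's Android factory reads buffers directly from the device microphone.
    final AudioDispatcher dispatcher =
            AudioDispatcherFactory.fromDefaultMicrophone(sampleRate, samplesPerFrame, bufferOverlap);
}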