Convolution reverb and impulse responses
// Posted by Paul Miller
// https://listarc.cal.bham.ac.uk/lists/sc-users-2016/msg52187.html
// code for building & testing IRs

// setup
(
s.waitForBoot({
    {
        // init
        // free old buffers
        if (a.notNil, {
            if (a.buffer.notNil, { a.buffer.free; });
            if (a.target.notNil, { a.target.free; });
            if (a.irspectrum.notNil, { a.irspectrum.free; });
        });
        a = (); // settings dict

        // fft
        a.fftsize = 2048;

        // set target buffer for testing convolution with
        a.target = Buffer.read(s, Platform.resourceDir +/+ "sounds/a11wlk01.wav");

        // fill buffer using one of the following Examples:

        /* // ======= EX1 - create IR using dust & noise ====== */
        // CHANGE THESE SETTINGS:
        a.bufferTime = 3;       // time in secs for new IR
        a.densityMin = 50;      // min/max density of reflections
        a.densityMax = 500;
        a.filterFreqMin = 100;  // min/max freq for filter sweep
        a.filterFreqMax = 8000;

        if (a.buffer.notNil, { a.buffer.free; });
        a.buffer = Buffer.alloc(s, s.sampleRate * a.bufferTime, 1);
        s.sync;

        // write into the buffer with a BufWr
        y = {
            var sourceVolEnv, filterFreqEnv, densityEnv, source, filterFunc, outSound;

            // CHANGE ENVELOPES, SOURCE & FILTER SETTINGS
            // sourceVolEnv = EnvGen.kr(Env([1, 0], [a.bufferTime], -3), doneAction: 2);
            // sourceVolEnv = EnvGen.kr(Env([0, 1], [a.bufferTime], 3), doneAction: 2); // reverse env
            sourceVolEnv = EnvGen.kr(Env([0.2, 1], [a.bufferTime], 6), doneAction: 2); // reverse env

            // filterFreqEnv = EnvGen.kr(Env([1, 0], [a.bufferTime], -2),
            //     levelScale: (a.filterFreqMax - a.filterFreqMin), levelBias: a.filterFreqMin, doneAction: 0);
            filterFreqEnv = EnvGen.kr(Env([0, 1], [a.bufferTime], 6),
                levelScale: (a.filterFreqMax - a.filterFreqMin), levelBias: a.filterFreqMin, doneAction: 0); // reverse env

            // densityEnv = EnvGen.kr(Env([0, 1], [a.bufferTime], 2),
            //     levelScale: (a.densityMax - a.densityMin), levelBias: a.densityMin, doneAction: 0);
            densityEnv = EnvGen.kr(Env([1, 0], [a.bufferTime], -2),
                levelScale: (a.densityMax - a.densityMin), levelBias: a.densityMin, doneAction: 0); // reverse env

            source = Dust.ar(densityEnv, 0.5) * WhiteNoise.ar;
            // source = Dust.ar(densityEnv, 0.5) * PinkNoise.ar;
            // source = Dust.ar(densityEnv, 0.5) * GrayNoise.ar;

            // filterFunc = { arg in, freq; LPF.ar(in, freq) };
            // filterFunc = { arg in, freq; HPF.ar(in, freq) };
            filterFunc = { arg in, freq;
                var allPass;
                // LPF.ar(in + AllpassC.ar(in, 0.1, 0.01, 0.5, mul: 0.75), freq);
                // LPF.ar(in + AllpassC.ar(in, 0.1, 0.1, 0.5, mul: 0.75), freq);
                // LPF.ar(in + AllpassC.ar(in, 0.01, XLine.kr(0.0001, 0.01, a.bufferTime), 0.2), freq);
                LPF.ar(in + AllpassC.ar(in * 0.5, 0.2, XLine.kr(0.001, 0.2, a.bufferTime), 0.5, mul: 0.5), freq);
            };

            outSound = filterFunc.value(sourceVolEnv * source, filterFreqEnv) * 100 / densityEnv; // adjust volume with density

            // record buffer
            BufWr.ar(outSound, a.buffer, Phasor.ar(0, BufRateScale.kr(a.buffer), 0, BufFrames.kr(a.buffer)), loop: 0);
            outSound ! 2;
        }.play;
        // =======

        /*
        // ======= EX2 - load file from disk - MONO ONLY ======
        if (a.buffer.notNil, { a.buffer.free; });
        a.buffer = Buffer.read(s, "/path/to/monoSoundFile.wav");
        // e.g.
        // a.buffer = Buffer.read(s, "/Users/paul/TESTING/TestSamples/_impulses/SavedImpulse_20160831222540.aif");
        // =======
        */

        /*
        // ======= EX3 - load file from disk & reverse it - MONO ONLY ======
        if (a.buffer.notNil, { a.buffer.free; });
        // a.buffer = Buffer.read(s, "/path/to/monoSoundFile.wav");
        // e.g.
        // a.buffer = Buffer.read(s, "/Users/paul/TESTING/TestSamples/_impulses/LexiconHallMono1.wav");
        a.buffer = Buffer.read(s, "/Users/paul/TESTING/TestSamples/_impulses/FactoryHallMono.wav");
        s.sync;
        a.buffer.loadToFloatArray(action: { arg array; a.floatArray = array.reverse; });
        s.sync;
        2.wait;
        a.buffer.free;
        a.buffer = Buffer.loadCollection(s, a.floatArray);
        */
        // =======

        /*
        // ======= EX4 - build buffer from floatArray - taken from PartConv help file
        // synthesise the honourable 'Dan Stowell' impulse response
        a.ir = ([1] ++ 0.dup(100) ++ (
            (1, 0.99998 .. 0).collect { |f|
                f = f.squared.squared;
                f = if(f.coin) { 0 } { f.squared };
                f = if(0.5.coin) { 0 - f } { f }
            } * 0.1
        )).normalizeSum;
        a.ir = a.ir.reverse; // added for backwards reverb
        if (a.buffer.notNil, { a.buffer.free; });
        a.buffer = Buffer.loadCollection(s, a.ir);
        // a.ir.plot;
        */
        // =======

        s.sync;
        "Setup Completed".postln;
    }.fork;
});
)

//////////////////////////////////////////////////////
// prepare irspectrum buffer
(
{
    s.sync;
    a.bufsize = PartConv.calcBufSize(a.fftsize, a.buffer);
    if (a.irspectrum.notNil, { a.irspectrum.free; });
    a.irspectrum = Buffer.alloc(s, a.bufsize, 1);
    a.irspectrum.preparePartConv(a.buffer, a.fftsize);
    s.sync;
    "Prepare Buffer Completed".postln;
}.fork;
)

//////////////////////////////////////////////////////
// examples to use the IR - all are mono, but duplicated in left/right speaker (using ! 2)

// convolve with target soundfile
(
{
    var input, convLevel = 0.6;
    input = PlayBuf.ar(1, a.target.bufnum, loop: 1);
    Out.ar(0, (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum, 0.5)) ! 2);
}.play;
)

// convolve with target soundfile adding input
(
{
    var input, delayTime, inputDelay, inputLevel = 0.5, convLevel = 0.5;
    input = PlayBuf.ar(1, a.target, loop: 1);
    Out.ar(0, ((inputLevel * input) + (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum, 0.5))) ! 2);
}.play;
)

// convolve with target soundfile adding delayed input
(
{
    var input, delayTime, inputDelay, inputLevel = 0.5, convLevel = 0.5;
    input = PlayBuf.ar(1, a.target, loop: 1);
    delayTime = BufFrames.ir(a.buffer.bufnum) / BufSampleRate.ir(a.buffer.bufnum);
    inputDelay = DelayC.ar(input * inputLevel, delayTime, delayTime);
    Out.ar(0, (inputDelay + (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum, 0.5))) ! 2);
}.play;
)

// convolve with live input
(
{
    var input, convLevel = 0.8;
    input = SoundIn.ar(0);
    Out.ar(0, (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum)) ! 2);
}.play;
)

// convolve with live input adding input
(
{
    var input, inputLevel = 0.7, convLevel = 0.7;
    input = SoundIn.ar(0);
    Out.ar(0, ((inputLevel * input) + (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum))) ! 2);
}.play;
)

// convolve with live input adding delayed input
(
{
    var input, delayTime, delayedInput, inputLevel = 0.7, convLevel = 0.7;
    input = SoundIn.ar(0);
    delayTime = BufFrames.ir(a.buffer.bufnum) / BufSampleRate.ir(a.buffer.bufnum);
    delayedInput = DelayC.ar(input, delayTime, delayTime);
    Out.ar(0, ((inputLevel * delayedInput) + (convLevel * PartConv.ar(input, a.fftsize, a.irspectrum.bufnum))) ! 2);
}.play;
)

//////////////////////////////////////////////////////
// save ir buffer to disk
(
a.savedBufferName = "/Users/paul/TESTING/TestSamples/_impulses/SavedImpulse_" // e.g.
    ++ Date.getDate.asSortableString ++ ".aif";
a.buffer.write(a.savedBufferName, headerFormat: "aiff", sampleFormat: "int24");
("Saved buffer to disk.
filename: " ++ a.savedBufferName).postln;
)

//////////////////////////////////////////////////////
// free buffers when finished
(
if (a.notNil, {
    if (a.buffer.notNil, { a.buffer.free; });
    if (a.target.notNil, { a.target.free; });
    if (a.irspectrum.notNil, { a.irspectrum.free; });
});
)
Boilerplate code for creating and testing convolution reverb and impulse responses
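A related follow-on, also not in the original post: a saved IR can be brought back in a later session by reading the file into a.buffer and then re-running the "prepare irspectrum buffer" block above. A minimal sketch, using a placeholder path in place of a file actually written by the save block:

// sketch (not in the original post): reload a previously saved IR in a later session
(
{
    if (a.isNil, { a = (); a.fftsize = 2048; });            // recreate settings dict when starting from scratch
    if (a.buffer.notNil, { a.buffer.free; });
    a.buffer = Buffer.read(s, "/path/to/SavedImpulse.aif"); // placeholder: substitute a file written by the save block
    s.sync;
    "Saved IR reloaded - now run the 'prepare irspectrum buffer' block".postln;
}.fork;
)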