fixed default bufnmf and nmfmatch, and ported all nmfmatch examples but one

nix
Pierre Alexandre Tremblay 7 years ago
parent a3dab0b844
commit 3e5a82fb54

Binary file not shown (image added, 42 KiB).

@@ -1,5 +1,5 @@
FluidBufNMF {
*process { arg server, srcBufNum, startAt = 0, nFrames = -1, startChan = 0, nChans = -1, dstBufNum, dictBufNum, dictFlag = 0, actBufNum, actFlag = 0, rank = 1, nIter = 100, sortFlag = 0, winSize = 1024, hopSize = 256, fftSize = -1, winType = 0, randSeed = -1;
*process { arg server, srcBufNum, startAt = 0, nFrames = -1, startChan = 0, nChans = -1, dstBufNum, dictBufNum, dictFlag = 0, actBufNum, actFlag = 0, rank = 1, nIter = 100, sortFlag = 0, winSize = 1024, hopSize = 512, fftSize = -1, winType = 0, randSeed = -1;
if(srcBufNum.isNil) { Error("Invalid buffer").format(thisMethod.name, this.class.name).throw};

@@ -1,6 +1,6 @@
FluidNMFMatch : MultiOutUGen {
*kr { arg in = 0, dictBufNum, maxRank = 1, nIter = 10, winSize = 1024, hopSize = 256, fftSize = -1;
*kr { arg in = 0, dictBufNum, maxRank = 1, nIter = 10, winSize = 1024, hopSize = 512, fftSize = -1;
^this.multiNew('control', in, dictBufNum, maxRank, nIter, winSize, hopSize, fftSize);
}

@@ -110,6 +110,58 @@ RETURNS::
EXAMPLES::
STRONG::A didactic example::
CODE::
(
// create buffers
b = Buffer.alloc(s,44100);
c = Buffer.alloc(s, 44100);
d = Buffer.new(s);
e = Buffer.new(s);
f = Buffer.new(s);
g = Buffer.new(s);
)
(
// fill them with 2 clearly separated sine waves and composite them into a buffer where they are consecutive
Routine {
b.sine2([500],[1], false, false);
c.sine2([5000],[1],false, false);
s.sync;
FluidBufCompose.process(s,srcBufNumA:b.bufnum, srcBufNumB:c.bufnum,dstStartAtB:44100,dstBufNum:d.bufnum);
s.sync;
d.query;
}.play;
)
// check
d.plot
d.play // beware: loud!
(
// separate them in 2 ranks
Routine {
FluidBufNMF.process(s, d.bufnum, dstBufNum:e.bufnum, dictBufNum: f.bufnum, actBufNum:g.bufnum, rank:2);
s.sync;
e.query;
f.query;
g.query;
}.play
)
// look at the resynthesised separated signal
e.plot;
// look at the dictionaries: check for the 2 spectral spikes
f.plot;
// look at the activations
g.plot;
//try running the same process on superimposed sine waves instead of consecutive ones in the source and see how it fails, as sketched below.
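// a minimal sketch of that experiment (hedged: it assumes FluidBufCompose sums
// overlapping material at the destination, as the consecutive example above implies)
(
~mix = Buffer.new(s); ~resynth2 = Buffer.new(s); ~dicts2 = Buffer.new(s); ~acts2 = Buffer.new(s);
Routine {
FluidBufCompose.process(s, srcBufNumA:b.bufnum, srcBufNumB:c.bufnum, dstBufNum:~mix.bufnum); // no dstStartAtB offset, so the sines overlap
s.sync;
FluidBufNMF.process(s, ~mix.bufnum, dstBufNum:~resynth2.bufnum, dictBufNum:~dicts2.bufnum, actBufNum:~acts2.bufnum, rank:2);
s.sync;
~dicts2.query;
}.play;
)
// this time the two dictionaries do not cleanly isolate one sine each
~dicts2.plot;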
::
STRONG::Basic musical examples::
code::
// set some buffers and parameters

@@ -48,11 +48,58 @@ RETURNS::
EXAMPLES::
STRONG::A didactic example::
CODE::
(
// create buffers
b = Buffer.alloc(s,44100);
c = Buffer.alloc(s, 44100);
d = Buffer.new(s);
e = Buffer.new(s);
)
(
// fill them with 2 clearly separated sine waves and composite them into a buffer where they are consecutive
Routine {
b.sine2([500],[1], false, false);
c.sine2([5000],[1],false, false);
s.sync;
FluidBufCompose.process(s,srcBufNumA:b.bufnum, srcBufNumB:c.bufnum,dstStartAtB:44100,dstBufNum:d.bufnum);
s.sync;
d.query;
}.play;
)
// check
d.plot
d.play // beware: loud!
(
// separate them in 2 ranks
Routine {
FluidBufNMF.process(s, d.bufnum, dictBufNum: e.bufnum, rank:2);
s.sync;
e.query;
}.play
)
// check for 2 spikes in the spectra
e.query
e.plot
// test the activation values against one, the other, or both of the ideal materials
{FluidNMFMatch.kr(SinOsc.ar(500),e.bufnum,2, hopSize:512)}.plot(1)
{FluidNMFMatch.kr(SinOsc.ar(5000),e.bufnum,2, hopSize:512)}.plot(1)
{FluidNMFMatch.kr(SinOsc.ar([500,5000]).sum,e.bufnum,2, hopSize:512)}.plot(1)
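// a further probe (hedged sketch): sweep one oscillator between the two trained
// frequencies and watch the two activations cross over
{FluidNMFMatch.kr(SinOsc.ar(XLine.kr(500,5000,10)),e.bufnum,2, hopSize:512)}.plot(10)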
::
STRONG::A pick compressor::
CODE::
//set some buffers
(
b = Buffer.read(s,File.realpath(FluidBufNMF.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-AaS-AcousticStrums-M.wav");
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-AaS-AcousticStrums-M.wav");
c = Buffer.new(s);
x = Buffer.new(s);
e = Buffer.alloc(s,1,1);
@@ -67,9 +114,10 @@ Routine {
}.play;
)
// find the rank that has the picking sound by changing which channel to listen to
// wait for the query to print
// then find the rank that has the picking sound by changing which channel to listen to
(
~element = 4;
~element = 8;
{PlayBuf.ar(10,c.bufnum)[~element]}.play
)
@@ -89,30 +137,177 @@ Routine{
e.plot;
//using this trained dictionary we can see the envelope (activations) of each rank
{FluidNMFMatch.kr(PlayBuf.ar(1,b.bufnum),e.bufnum,2,fftSize:2048)}.plot(10);
{FluidNMFMatch.kr(PlayBuf.ar(1,b.bufnum),e.bufnum,2,fftSize:2048)}.plot(1);
// the left/top activations come first: the pick arrives before the sustain.
// we can then use the activation value to sidechain a compression patch that is sent into a delay network
(
{
var source, todelay, delay1, delay2, delay3;
source = PlayBuf.ar(1,b.bufnum);
todelay = DelayN.ar(source,0.1, 256/44100, Ramp.ar(K2A.ar(FluidNMFMatch.kr(source,e.bufnum,2)[0]),512/44100));
var source, todelay, delay1, delay2, delay3, feedback, mod1, mod2, mod3, mod4;
//read the source
source = PlayBuf.ar(1, b.bufnum);
// generate modulators that are coprime in frequency
mod1 = SinOsc.ar(1, 0, 0.001);
mod2 = SinOsc.ar(((617 * 181) / (461 * 991)), 0, 0.001);
mod3 = SinOsc.ar(((607 * 193) / (491 * 701)), 0, 0.001);
mod4 = SinOsc.ar(((613 * 191) / (463 * 601)), 0, 0.001);
// compress the signal to send to the delays
todelay = DelayN.ar(source,0.1, 800/44100, //delaying it to compensate for FluidNMFMatch's latency
LagUD.ar(K2A.ar(FluidNMFMatch.kr(source,e.bufnum,2,fftSize:2048)[0]), //reading the channel of the activations on the pick dictionary
80/44100, // lag uptime (compressor's attack)
1000/44100, // lag downtime (compressor's decay)
(1/(2.dbamp) // compressor's threshold inverted
)).clip(1,1000).pow((8.reciprocal)-1)); //clipping so only values above threshold have an effect; the ratio (8) then shapes the exponent (1/ratio - 1)
// delay network
feedback = LocalIn.ar(3);// take the feedback in for the delays
delay1 = DelayC.ar(BPF.ar(todelay+feedback[1]+(feedback[2] * 0.3), 987, 6.7,0.8),0.123,0.122+(mod1*mod2));
delay2 = DelayC.ar(BPF.ar(todelay+feedback[0]+(feedback[2] * 0.3), 1987, 6.7,0.8),0.345,0.344+(mod3*mod4));
delay3 = DelayC.ar(BPF.ar(todelay+feedback[1], 1456, 6.7,0.8),0.567,0.566+(mod1*mod3),0.6);
LocalOut.ar([delay1,delay2, delay3]); // write the feedback for the delays
//listen to the delays only by uncommenting the following line
// [delay1+delay3,delay2+delay3]
source.dup + ([delay1+delay3,delay2+delay3]*(-3.dbamp))
}.play;
)
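// a language-side sanity check of the side-chain gain law above (hedged sketch):
// the activation is scaled by the inverted threshold, clipped to [1,1000] so only
// above-threshold values matter, then raised to (1/ratio - 1) to give the gain
// applied to the signal feeding the delays
(
var thresh = 2.dbamp, ratio = 8;
[0.5, 1, 2, 4, 8].do { |act|
var gain = (act / thresh).clip(1, 1000).pow(ratio.reciprocal - 1);
"activation % -> gain %".format(act, gain.round(0.001)).postln;
};
)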
::
STRONG::Object finder::
CODE::
//set some buffers
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-BaB-SoundscapeGolcarWithDog.wav");
c = Buffer.new(s);
x = Buffer.new(s);
e = Buffer.alloc(s,1,1);
)
// train where all objects are present
(
Routine {
FluidBufNMF.process(s,b.bufnum,130000,150000,0,1, c.bufnum, x.bufnum, rank:10);
s.sync;
c.query;
}.play;
)
// wait for the query to print
// then find a rank for each object you want to track. You could also sum ranks. Try to find a rank with a good object-to-rest ratio
(
~dog =0;
{PlayBuf.ar(10,c.bufnum)[~dog]}.play
)
(
~bird = 5;
{PlayBuf.ar(10,c.bufnum)[~bird]}.play
)
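// optionally, audition each of the 10 ranks in turn to speed up the search
// (a hedged sketch; the trained segment is short, so 2 seconds each is plenty)
(
Routine {
10.do { |chan|
("rank:" + chan).postln;
{ PlayBuf.ar(10, c.bufnum, doneAction:2)[chan] }.play;
2.wait;
};
}.play;
)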
// copy the dog and bird ranks to their own channels, then sum all the remaining ranks into a third, left-over channel
(
Routine{
FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:~dog, nChansA: 1, srcBufNumB: e.bufnum, dstBufNum: e.bufnum);
FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:~bird, nChansA: 1, dstStartChanA: 1, srcBufNumB: e.bufnum, dstBufNum: e.bufnum);
s.sync;
(0..9).removeAll([~dog,~bird]).do({|chan|FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:chan, nChansA: 1, dstStartChanA: 2, srcBufNumB: e.bufnum, dstBufNum: e.bufnum)});
s.sync;
e.query;
}.play;
)
e.plot;
//using this trained dictionary we can then see the activation...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = FluidNMFMatch.kr(source.sum,e.bufnum,3);
}.plot(10);
)
// ...and use some threshold to 'find' objects...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e.bufnum,3),0.5,[10,1,1000]);
}.plot(10);
)
// ...and use these to sonify them
(
{
var source, blips, dogs, birds;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e.bufnum,3),0.5,[10,1,1000]);
dogs = SinOsc.ar(100,0,Lag.kr(blips[0],0.05,0.15));
birds = SinOsc.ar(1000,0,Lag.kr(blips[1],0.05,0.05));
[dogs, birds] + source;
}.play;
)
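// a hedged extension: report each detection to the language with SendReply,
// mirroring the pattern used in the piano example below
(
{
var source, blips;
source = PlayBuf.ar(2, b.bufnum);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e.bufnum,3),0.5,[10,1,1000]);
SendReply.kr(blips[0], '/dog', blips[0]);
SendReply.kr(blips[1], '/bird', blips[1]);
source;
}.play;
OSCdef(\dog, { "dog detected".postln }, '/dog');
OSCdef(\bird, { "bird detected".postln }, '/bird');
)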
::
STRONG::Pretrained piano::
CODE::
//load in the sound and a pretrained dictionary
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-SA-UprightPianoPedalWide.wav");
c = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/filters/piano-dicts.wav");
)
b.play
c.query
//use the pretrained dictionary to compute the activations of each note, driving the amplitudes of a resynth
(
{
var source, resynth;
source = PlayBuf.ar(2, b.bufnum,loop:1).sum;
resynth = SinOsc.ar((21..108).midicps, 0, FluidNMFMatch.kr(source,c.bufnum,88,10,4096).madd(0.002)).sum;
[source, resynth]
}.play
)
//now sample and hold the same stream to get notes identified, played and sent back via osc
(
{
var source, resynth, chain, trig, acts;
source = PlayBuf.ar(2,b.bufnum,loop:1).sum;
// built-in attack detection, delayed until the stable part of the sound
chain = FFT(LocalBuf(256), source);
trig = TDelay.kr(Onsets.kr(chain, 0.5),0.1);
// sample and hold activation values that are scaled and capped, in effect thresholding them
acts = Latch.kr(FluidNMFMatch.kr(source,c.bufnum,88,10,4096).linlin(15,20,0,0.1),trig);
// resynthesise as in the previous example, with the values sent back to the language
resynth = SinOsc.ar((21..108).midicps, 0, acts).sum;
SendReply.kr(trig, '/activations', acts);
[source, resynth]
// [source, T2A.ar(trig)]
// resynth
}.play
)
// define a receiver for the activations
(
OSCdef(\listener, {|msg|
var data = msg[3..];
// remove the silent ones and post the indices as MIDI note numbers
data.collect({arg item, i; if (item > 0.01, {i + 21})}).reject({arg item; item.isNil}).postln;
}, '/activations');
)
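// a hedged follow-up: also play the detected notes back in the language,
// here with SuperCollider's built-in \default SynthDef
(
OSCdef(\player, {|msg|
var notes = msg[3..].collect({arg item, i; if (item > 0.01, {i + 21})}).reject({arg item; item.isNil});
notes.do { |n| Synth(\default, [\freq, n.midicps, \amp, 0.05]) };
}, '/activations');
)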
::
STRONG::Strange Resonators::
CODE::
//to be completed
::

@@ -0,0 +1,175 @@
//designing the guitar example
(
{
var source, todelay, delay1, delay2, delay3, feedback, mod1, mod2, mod3, mod4;
//read the source
source = PlayBuf.ar(1, b.bufnum);
// generate modulators that are coprime in frequency
mod1 = SinOsc.ar(1, 0, 0.001);
mod2 = SinOsc.ar(((617 * 181) / (461 * 991)), 0, 0.001);
mod3 = SinOsc.ar(((607 * 193) / (491 * 701)), 0, 0.001);
mod4 = SinOsc.ar(((613 * 191) / (463 * 601)), 0, 0.001);
// compress the signal to send to the delays
todelay = DelayN.ar(source,0.1, 800/44100, //delaying it to compensate for FluidNMFMatch's latency
LagUD.ar(K2A.ar(FluidNMFMatch.kr(source,e.bufnum,2,fftSize:2048)[0]), //reading the channel of the activations on the pick dictionary
80/44100, // lag uptime (compressor's attack)
1000/44100, // lag downtime (compressor's decay)
(1/(2.dbamp) // compressor's threshold inverted
)).clip(1,1000).pow((8.reciprocal)-1)); //clipping it so we only affect above threshold, then ratio(8) becomes the exponent of that base
// delay network
feedback = LocalIn.ar(3);// take the feedback in for the delays
delay1 = DelayC.ar(BPF.ar(todelay+feedback[1]+(feedback[2] * 0.3), 987, 6.7,0.8),0.123,0.122+(mod1*mod2));
delay2 = DelayC.ar(BPF.ar(todelay+feedback[0]+(feedback[2] * 0.3), 1987, 6.7,0.8),0.345,0.344+(mod3*mod4));
delay3 = DelayC.ar(BPF.ar(todelay+feedback[1], 1456, 6.7,0.8),0.567,0.566+(mod1*mod3),0.6);
LocalOut.ar([delay1,delay2, delay3]); // write the feedback for the delays
//listen to the delays only by uncommenting the following line
// [delay1+delay3,delay2+delay3]
source.dup + ([delay1+delay3,delay2+delay3]*(-3.dbamp))
}.play;
)
//doing the piano training
//load in the sound and a pretrained dictionary
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-SA-UprightPianoPedalWide.wav");
c = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/filters/piano-dicts.wav");
)
b.play
c.query
//bufcompose an 88-note version
// d = Buffer.alloc(s,1);
// FluidBufCompose.process(s,c.bufnum,nChansA:88,srcBufNumB:d.bufnum,dstBufNum:d.bufnum)
// d.query
// d.write("/Users/pa/Documents/documents@hudd/research/projects/fluid corpus navigation/research/fluid_decomposition/AudioFiles/filters/piano-dicts.wav", "wav", "float32")
//now use the activations of each note to generate a resynth
(
x = { arg dabuf = 1;
var source, resynth;
source = PlayBuf.ar(2, b.bufnum,loop:1).sum;
resynth = SinOsc.ar((21..108).midicps, 0, FluidNMFMatch.kr(source,dabuf,88,10,4096).madd(0.002)).sum;
[source, resynth]
}.play
)
d = Buffer.read(s,"/Users/pa/Desktop/piano-dicts/seed-dc-01.wav");
x.set(\dabuf, d.bufnum)
//now sample and hold the same stream to get notes identified, played and sent back via osc
(
{
var source, resynth, chain, trig, acts;
source = PlayBuf.ar(2,b.bufnum,loop:1).sum;
// built-in attack detection, delayed until the stable part of the sound
chain = FFT(LocalBuf(256), source);
trig = TDelay.kr(Onsets.kr(chain, 0.5),0.1);
// sample and hold activation values that are scaled and capped, in effect thresholding them
acts = Latch.kr(FluidNMFMatch.kr(source,c.bufnum,88,10,4096).linlin(15,20,0,0.1),trig);
// resynthesise as in the previous example, with the values sent back to the language
resynth = SinOsc.ar((21..108).midicps, 0, acts).sum;
SendReply.kr(trig, '/activations', acts);
[source, resynth]
// [source, T2A.ar(trig)]
// resynth
}.play
)
// define a receiver for the activations
(
OSCdef(\listener, {|msg|
var data = msg[3..];
// remove the silent ones and post the indices as MIDI note numbers
data.collect({arg item, i; if (item > 0.01, {i + 21})}).reject({arg item; item.isNil}).postln;
}, '/activations');
)
// (0..10).collect({arg item, i; if (item > 5, {i})}).reject({arg item; item.isNil}).postln;
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//set some buffers
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-BaB-SoundscapeGolcarWithDog.wav");
c = Buffer.new(s);
x = Buffer.new(s);
e = Buffer.alloc(s,1,1);
)
// train where all objects are present
(
Routine {
FluidBufNMF.process(s,b.bufnum,130000,150000,0,1, c.bufnum, x.bufnum, rank:10);
s.sync;
c.query;
}.play;
)
// wait for the query to print
// then find a rank for each object you want to track. You could also sum ranks. Try to find a rank with a good object-to-rest ratio
(
~dog =0;
{PlayBuf.ar(10,c.bufnum)[~dog]}.play
)
(
~bird = 5;
{PlayBuf.ar(10,c.bufnum)[~bird]}.play
)
// copy the dog and bird ranks to their own channels, then sum all the remaining ranks into a third, left-over channel
(
Routine{
FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:~dog, nChansA: 1, srcBufNumB: e.bufnum, dstBufNum: e.bufnum);
FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:~bird, nChansA: 1, dstStartChanA: 1, srcBufNumB: e.bufnum, dstBufNum: e.bufnum);
s.sync;
(0..9).removeAll([~dog,~bird]).do({|chan|FluidBufCompose.process(s,srcBufNumA: x.bufnum, startChanA:chan, nChansA: 1, dstStartChanA: 2, srcBufNumB: e.bufnum, dstBufNum: e.bufnum)});
s.sync;
e.query;
}.play;
)
e.plot;
//using this trained dictionary we can then see the activation...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = FluidNMFMatch.kr(source.sum,e.bufnum,3);
}.plot(10);
)
// ...and use some threshold to 'find' objects...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e.bufnum,3),0.5,[10,1,1000]);
}.plot(10);
)
// ...and use these to sonify them
(
{
var source, blips, dogs, birds;
//read the source
source = PlayBuf.ar(2, b.bufnum);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e.bufnum,3),0.5,[10,1,1000]);
dogs = SinOsc.ar(100,0,Lag.kr(blips[0],0.05,0.15));
birds = SinOsc.ar(1000,0,Lag.kr(blips[1],0.05,0.05));
[dogs, birds] + source;
}.play;
)