removed the object finder and the pretrained piano examples from nmfmatch help and put them in the example folder

nix
Pierre Alexandre Tremblay 6 years ago
parent ca34201054
commit e407c2efb8

@@ -182,134 +182,8 @@ z.do({|chan| FluidBufCompose.process(s, ~bases, startChan:chan, numChans: 1, des
)
::
STRONG::Object finder::
CODE::
//set some buffers
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-BaB-SoundscapeGolcarWithDog.wav");
c = Buffer.new(s);
x = Buffer.new(s);
e = Buffer.new(s);
)
// train where all objects are present
(
Routine {
FluidBufNMF.process(s,b,130000,150000,0,1, c, x, components:10);
c.query;
}.play;
)
// wait for the query to print
// then audition the components to find one for each object of interest (you could also sum several); aim for a component with a good object-to-rest ratio
(
~dog = 4;
{PlayBuf.ar(10,c)[~dog]}.play
)
(
~bird = 3;
{PlayBuf.ar(10,c)[~bird]}.play
)
// copy at least one other component to a third filter, a sort of left-over channel
(
Routine{
FluidBufCompose.process(s, x, startChan:~dog, numChans: 1, destination: e);
FluidBufCompose.process(s, x, startChan:~bird, numChans: 1, destStartChan: 1, destination: e, destGain:1);
(0..9).removeAll([~dog,~bird]).do({|chan|FluidBufCompose.process(s,x, startChan:chan, numChans: 1, destStartChan: 2, destination: e, destGain:1)});
e.query;
}.play;
)
e.plot;
//using these trained bases we can then see the activations... (wait 5 seconds for the plot to appear!)
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b);
blips = FluidNMFMatch.kr(source.sum,e,3);
}.plot(5);
)
// ...and use some threshold to 'find' objects...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e,3),0.5,[10,1,1000]);
}.plot(5);
)
// ...and use these to sonify them
(
{
var source, blips, dogs, birds;
//read the source
source = PlayBuf.ar(2, b);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e,3),0.5,[10,1,1000]);
dogs = SinOsc.ar(100,0,Lag.kr(blips[0],0.05,0.15));
birds = SinOsc.ar(1000,0,Lag.kr(blips[1],0.05,0.05));
[dogs, birds] + source;
}.play;
)
::
STRONG::Pretrained piano::
CODE::
//load the sound and a pretrained basis
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-SA-UprightPianoPedalWide.wav");
c = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/filters/piano-dicts.wav");
)
b.play
c.query
//use the pretrained bases to compute the activations of each note to drive the amplitude of a resynth
(
{
var source, resynth;
source = PlayBuf.ar(2, b,loop:1).sum;
resynth = SinOsc.ar((21..108).midicps, 0, FluidNMFMatch.kr(source,c,88,10,4096).madd(0.002)).sum;
[source, resynth]
}.play
)
//now sample and hold the same stream to get notes identified, played and sent back via OSC
(
{
var source, resynth, chain, trig, acts;
source = PlayBuf.ar(2,b,loop:1).sum;
// built-in attack detection, delayed until the stable part of the sound
chain = FFT(LocalBuf(256), source);
trig = TDelay.kr(Onsets.kr(chain, 0.5),0.1);
// samples and holds activation values that are scaled and capped, in effect thresholding them
acts = Latch.kr(FluidNMFMatch.kr(source,c,88,10,4096).linlin(15,20,0,0.1),trig);
// resynthesises as in the previous example, with the values sent back to the language
resynth = SinOsc.ar((21..108).midicps, 0, acts).sum;
SendReply.kr(trig, '/activations', acts);
[source, resynth]
// [source, T2A.ar(trig)]
// resynth
}.play
)
// define a receiver for the activations
(
OSCdef(\listener, {|msg|
var data = msg[3..];
// removes the silent ones and spits out the indices as MIDI note numbers
data.collect({arg item, i; if (item > 0.01, {i + 21})}).reject({arg item; item.isNil}).postln;
}, '/activations');
)
::
STRONG::Strange Resonators::
CODE::
//load the source and declare buffers/arrays

@@ -1,6 +1,5 @@
// A complex example of using composition as a Mid-Side FIR filtering process
// load a stereo buffer and initialise the many destinations
(
b = Buffer.read(s,File.realpath(FluidBufCompose.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-SA-UprightPianoPedalWide.wav");
@@ -66,4 +65,3 @@ f.play;
// compare with the original
b.play;
::

@@ -0,0 +1,74 @@
// using NMF: split a small portion, associate components with targets, then threshold these targets' activations to find objects.
//set some buffers
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-BaB-SoundscapeGolcarWithDog.wav");
c = Buffer.new(s);
x = Buffer.new(s);
e = Buffer.new(s);
)
// train where all objects are present
(
Routine {
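	// decompose 150000 frames from frame 130000 into 10 components
	// (as used below, c holds the per-component resynthesis and x the spectral bases)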
FluidBufNMF.process(s,b,130000,150000,0,1, c, x, components:10);
c.query;
}.play;
)
// wait for the query to print
// then audition the components to find one for each object of interest (you could also sum several); aim for a component with a good object-to-rest ratio
(
~dog = 1;
{PlayBuf.ar(10,c)[~dog]}.play
)
(
~bird = 3;
{PlayBuf.ar(10,c)[~bird]}.play
)
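// one possible way to survey all ten components: step through the resynth channels in a loop
(
Routine {
	10.do { |chan|
		var sound;
		("auditioning component" + chan).postln;
		sound = { PlayBuf.ar(10, c, loop: 1)[chan] }.play;
		3.wait;
		sound.free;
	};
}.play;
)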
// copy at least one other component to a third filter, a sort of left-over channel
(
Routine{
FluidBufCompose.process(s, x, startChan:~dog, numChans: 1, destination: e);
FluidBufCompose.process(s, x, startChan:~bird, numChans: 1, destStartChan: 1, destination: e, destGain:1);
(0..9).removeAll([~dog,~bird]).do({|chan|FluidBufCompose.process(s,x, startChan:chan, numChans: 1, destStartChan: 2, destination: e, destGain:1)});
e.query;
}.play;
)
e.plot;
//using these trained bases we can then see the activations... (wait 5 seconds for the plot to appear!)
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b);
blips = FluidNMFMatch.kr(source.sum,e,3);
}.plot(5);
)
// ...and use some threshold to 'find' objects...
(
{
var source, blips;
//read the source
source = PlayBuf.ar(2, b);
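// Schmidt outputs 1 once a channel rises above its hi threshold ([10, 1, 1000] here;
// the left-over channel's 1000 should never fire) and 0 once it falls back below 0.5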
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e,3),0.5,[10,1,1000]);
}.plot(5);
)
// ...and use these to sonify them
(
{
var source, blips, dogs, birds;
//read the source
source = PlayBuf.ar(2, b);
blips = Schmidt.kr(FluidNMFMatch.kr(source.sum,e,3),0.5,[10,1,1000]);
dogs = SinOsc.ar(100,0,Lag.kr(blips[0],0.05,0.15));
birds = SinOsc.ar(1000,0,Lag.kr(blips[1],0.05,0.05));
[dogs, birds] + source;
}.play;
)
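// the 3-channel dictionary in e could be saved to disk for later reuse with FluidNMFMatch
// (the path below is only an example):
e.write("~/golcar-bases.wav".standardizePath, "wav", "float");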

@@ -0,0 +1,51 @@
// Using an 88-component piano basis to do polyphonic pitch tracking
//load the sound and a pretrained basis
(
b = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-SA-UprightPianoPedalWide.wav");
c = Buffer.read(s,File.realpath(FluidNMFMatch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/filters/piano-dicts.wav");
)
b.play
c.query
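// c should report 88 channels: one spectral basis per piano note (MIDI notes 21 to 108)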
//use the pretrained bases to compute the activations of each note to drive the amplitude of a resynth
(
{
var source, resynth;
source = PlayBuf.ar(2, b,loop:1).sum;
resynth = SinOsc.ar((21..108).midicps, 0, FluidNMFMatch.kr(source,c,88,10,4096).madd(0.002)).sum;
[source, resynth]
}.play
)
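// to gauge the raw activation range (which informs the linlin scaling below), one could
// plot a handful of middle-register channels:
(
{
FluidNMFMatch.kr(PlayBuf.ar(2, b, loop: 1).sum, c, 88, 10, 4096)[40..51]
}.plot(5);
)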
//now sample and hold the same stream to get notes identified, played and sent back via OSC
(
{
var source, resynth, chain, trig, acts;
source = PlayBuf.ar(2,b,loop:1).sum;
// built-in attack detection, delayed until the stable part of the sound
chain = FFT(LocalBuf(256), source);
trig = TDelay.kr(Onsets.kr(chain, 0.5),0.1);
// samples and holds activation values that are scaled and capped, in effect thresholding them
acts = Latch.kr(FluidNMFMatch.kr(source,c,88,10,4096).linlin(15,20,0,0.1),trig);
// resynthesises as in the previous example, with the values sent back to the language
resynth = SinOsc.ar((21..108).midicps, 0, acts).sum;
SendReply.kr(trig, '/activations', acts);
[source, resynth]
// [source, T2A.ar(trig)]
// resynth
}.play
)
// define a receiver for the activations
(
OSCdef(\listener, {|msg|
var data = msg[3..];
// removes the silent ones and spits out the indices as MIDI note numbers
data.collect({arg item, i; if (item > 0.01, {i + 21})}).reject({arg item; item.isNil}).postln;
}, '/activations');
)
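// the receiver can be tested without the synth by faking a SendReply message
// (the two -1s stand in for the nodeID and replyID that SendReply prepends):
NetAddr.localAddr.sendMsg('/activations', -1, -1, *(0.05 ! 3 ++ (0 ! 85)));
// should post [ 21, 22, 23 ]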