a few defers here and there, and removing the names of the datasets

Pierre Alexandre Tremblay 5 years ago
parent 2a57d0a127
commit f300a7d196
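The second half of the title, removing the names of the datasets, is the one-line change repeated through every file below: FluidDataSet and FluidDataSet.new are now called with the server argument only, so each instance manages its own (presumably auto-generated) identifier rather than relying on a hand-picked global symbol that could clash with another example. A minimal sketch of the pattern, assuming a booted server s; the dictionary contents and the variable name ~ds are only illustrative:

(
// before this commit a unique symbol had to be supplied by hand:
// ~ds = FluidDataSet(s, \mySet);
// after, only the server is passed and the object names itself:
~ds = FluidDataSet(s);
~ds.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom([\a, [1.0], \b, [2.0]])]));
~ds.print; // post the contents to check the load
)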

@@ -1,8 +1,8 @@
 // define a few processes
 (
-~ds = FluidDataSet(s,\ds10);
-~dsW = FluidDataSet(s,\ds10W);
-~dsL = FluidDataSet(s,\ds10L);
+~ds = FluidDataSet(s);
+~dsW = FluidDataSet(s);
+~dsL = FluidDataSet(s);
 //define as many buffers as we have parallel voices/threads in the extractor processing (default is 4)
 ~loudbuf = 4.collect{Buffer.new};
 ~weightbuf = 4.collect{Buffer.new};

@@ -90,7 +90,7 @@ Routine{
 ~normtree.fit(~normed_dataset)
 ~normtree.kNearest(~normbuf, {|x| ("Labels:" + x).postln});
 ~normtree.kNearestDist(~normbuf, {|x| ("Distances:" + x).postln});
-// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg will probably have changed. The distance is now different too
+// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg might have changed. The distance is now different too
 // standardize that same point (~query_buf) to be at the right scale
 ~stdbuf = Buffer.alloc(s,~nb_of_dim);
@@ -102,7 +102,7 @@ Routine{
 ~stdtree.fit(~standardized_dataset)
 ~stdtree.kNearest(~stdbuf, {|x| ("Labels:" + x).postln});
 ~stdtree.kNearestDist(~stdbuf, {|x| ("Distances:" + x).postln});
-// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg will probably have changed yet again. The distance is also different too
+// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg might have changed yet again. The distance is also different too
 // where it starts to be interesting is when we query points that are not in our original dataset

@@ -8,13 +8,13 @@ Routine{
 d = Dictionary.new;
 d.add(\cols -> 2);
 d.add(\data -> Dictionary.newFrom(["f-f", [0,0], "f-t", [0,1], "t-f", [1,0], "t-t", [1,1]]));
-~mlpHelpSource = FluidDataSet.new(s,\mlpHelpSource);
+~mlpHelpSource = FluidDataSet.new(s);
 s.sync;
 ~mlpHelpSource.load(d);
 s.sync;
 d.add(\cols -> 1);
 d.add(\data -> Dictionary.newFrom(["f-f", [0], "f-t", [1], "t-f", [1], "t-t", [0]]));
-~mlpHelpTarget = FluidDataSet.new(s,\mlpHelpTarget);
+~mlpHelpTarget = FluidDataSet.new(s);
 s.sync;
 ~mlpHelpTarget.load(d);
 s.sync;
@@ -27,7 +27,7 @@ Routine{
 // make an MLPregressor
 ~mlp = FluidMLPRegressor(s, [3], FluidMLPRegressor.sigmoid, FluidMLPRegressor.sigmoid,maxIter:1000,learnRate: 0.1,momentum: 0.1,batchSize: 1,validation: 0);//1000 epoch at a time
-//train on it and observe the error
+//train it by executing the following line multiple time, and observe the error
 ~mlp.fit(~mlpHelpSource,~mlpHelpTarget,{|x|x.postln;});
 //to make a plot of the error let's do a classic 'shades of truth' (a grid of 11 x 11 with each values of truth between 0 and 1
@@ -37,7 +37,7 @@ Routine{
 d.add(\cols -> 2);
 d.add(\data -> Dictionary.newFrom(121.collect{|x|[x.asString, [x.div(10)/10,x.mod(10)/10]]}.flatten));
-~mlpHelpShades = FluidDataSet.new(s,\mlpHelpShades);
+~mlpHelpShades = FluidDataSet.new(s);
 s.sync;
 ~mlpHelpShades.load(d);
 s.sync;
@@ -48,7 +48,7 @@ Routine{
 ~mlpHelpShades.print
 // let's make a destination for our regressions
-~mlpHelpRegressed = FluidDataSet.new(s,\mlpHelpRegressed);
+~mlpHelpRegressed = FluidDataSet.new(s);
 // then predict the full DataSet in our trained network
 ~mlp.predict(~mlpHelpShades,~mlpHelpRegressed);
@@ -77,6 +77,7 @@ w.refresh;
 w.front;
 )
+~mlp.free
 ~mlpHelpShades.free
 ~mlpHelpSource.free
 ~mlpHelpTarget.free

@@ -7,8 +7,8 @@ var output = Buffer.alloc(s,10);
 var mlp = FluidMLPRegressor(s,[6],activation: 1,outputActivation: 1,maxIter: 1000,learnRate: 0.1,momentum: 0,batchSize: 1,validation: 0);
 var entry = 0;
-~inData = FluidDataSet(s,\indata);
-~outData = FluidDataSet(s,\outdata);
+~inData = FluidDataSet(s);
+~outData = FluidDataSet(s);
 w = Window("ChaosSynth", Rect(10, 10, 790, 320)).front;
 a = MultiSliderView(w,Rect(10, 10, 400, 300)).elasticMode_(1).isFilled_(1);
@@ -42,8 +42,10 @@ f.action = {arg x,y; //if trained, predict the point f.x f.y
 c = Button(w, Rect(730,240,50, 20)).states_([["train", Color.red, Color.white], ["trained", Color.white, Color.grey]]).action_{
 mlp.fit(~inData,~outData,{|x|
 trained = 1;
-c.value = 1;
-e.value = x.round(0.001).asString;
+{
+c.value = 1;
+e.value = x.round(0.001).asString;
+}.defer;
 });//train the network
 };
 d = Button(w, Rect(730,10,50, 20)).states_([["entry", Color.white, Color.grey], ["entry", Color.red, Color.white]]).action_{
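The "few defers here and there" of the title all follow the same SuperCollider rule, visible in the hunk above: the reply to mlp.fit comes back from the server outside the AppClock, and GUI objects such as the train button c and the text field e may only be touched from the AppClock, so the updates are wrapped in a function and deferred. A self-contained sketch of that rule, with a fork standing in for the server callback; the window, the widget names and the one-second wait are illustrative only:

(
// throwaway window; w, b and t exist only for this sketch
w = Window("defer demo", Rect(100, 100, 220, 70)).front;
b = Button(w, Rect(10, 10, 90, 24)).states_([["waiting"], ["done"]]);
t = StaticText(w, Rect(10, 40, 200, 20)).string_("error: ?");
fork {
	1.wait; // stand-in for the time the training takes
	{ b.value = 1; t.string = "error: 0.123" }.defer; // GUI updates must be deferred to the AppClock
};
)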

@@ -1,9 +1,9 @@
 s.reboot;
 //Preliminaries: we want some audio, a couple of FluidDataSets, some Buffers
 (
-~raw = FluidDataSet(s,\Mel40);
-~norm = FluidDataSet(s,\Mel40n);
-~retrieved = FluidDataSet(s,\ae2);
+~raw = FluidDataSet(s);
+~norm = FluidDataSet(s);
+~retrieved = FluidDataSet(s);
 ~audio = Buffer.read(s,File.realpath(FluidBufMelBands.class.filenameSymbol).dirname +/+ "../AudioFiles/Tremblay-ASWINE-ScratchySynth-M.wav");
 ~melfeatures = Buffer.new(s);
 ~stats = Buffer.alloc(s, 7, 40);
@@ -56,7 +56,7 @@ FluidBufMelBands.process(s,~audio, features: ~melfeatures,action: {\done.postln;
 ~retrieved.print;
 //let's normalise it for display
-~normData = FluidDataSet(s,\ae2N);
+~normData = FluidDataSet(s);
 ~reducedarray = Array.new(100);
 ~normalView = FluidNormalize(s,0.1,0.9);
 (

@@ -2,9 +2,9 @@
 //Here we make a 3-points pair of dataset.
-~dsIN = FluidDataSet(s,\dsIN);
+~dsIN = FluidDataSet(s);
 ~dsIN.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom([\point1, [10], \point2, [20], \point3, [30]])]));
-~dsOUT = FluidDataSet(s,\dsOUT);
+~dsOUT = FluidDataSet(s);
 ~dsOUT.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom([\point1, [0.8], \point2, [0.2], \point3, [0.5]])]));
 //check the values
@@ -13,13 +13,13 @@
 (0.dup(10) ++ 0.8 ++ 0.dup(9) ++ 0.2 ++ 0.dup(9) ++ 0.5 ++ 0.dup(10)).plot(\source,discrete: true, minval:0, maxval: 1).plotMode=\bars;
 //Let's make a complete dataset to predict each points in our examples:
-~dsALLin = FluidDataSet(s,\dsALLin);
+~dsALLin = FluidDataSet(s);
 ~dsALLin.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom(Array.fill(41,{|x| [x.asSymbol, [x]];}).flatten(1);)]));
 ~dsALLin.print
 //We'll regress these values via KNN and plot
 ~regK = FluidKNNRegressor(s,numNeighbours: 2,weight: 1);
-~dsALLknn = FluidDataSet(s,\dsALLknn);
+~dsALLknn = FluidDataSet(s);
 ~regK.fit(~dsIN,~dsOUT);
 ~regK.predict(~dsALLin,~dsALLknn);
@@ -36,7 +36,7 @@
 //Let's do the same process with MLP
 ~regM = FluidMLPRegressor(s,hidden: [4],activation: 1,outputActivation: 1,maxIter: 10000,learnRate: 0.1,momentum: 0,batchSize: 1,validation: 0);
-~dsALLmlp = FluidDataSet(s,\dsALLmlp);
+~dsALLmlp = FluidDataSet(s);
 ~regM.fit(~dsIN,~dsOUT,{|x|x.postln;});
 ~regM.predict(~dsALLin,~dsALLmlp);
@@ -86,7 +86,7 @@
 ~dsALLknn.dump{|x| 41.do{|i|
 ~knnALLval.add((x["data"][i.asString]));
 };//draw everything
-[~source, ~knnALLval.flatten(1), ~mlpALLval.flatten(1)].flop.flatten(1).plot(\source,numChannels: 3, discrete: false, minval:0, maxval: 1).plotMode=\bars;
+{[~source, ~knnALLval.flatten(1), ~mlpALLval.flatten(1)].flop.flatten(1).plot(\source,numChannels: 3, discrete: false, minval:0, maxval: 1).plotMode=\bars;}.defer;
 };
 });
 });
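The last hunk applies the same deferral to plotting: the dump reply also arrives outside the AppClock and .plot opens a window, so the whole plotting expression is wrapped in a deferred function. A small sketch of the shape of that change, with made-up values and a fork standing in for the dump callback:

(
fork {
	var vals = 41.collect { |i| (i / 40).squared }; // stand-in for values gathered from a DataSet dump
	1.wait; // stand-in for waiting on the server reply
	{ vals.plot(\regressed, discrete: false, minval: 0, maxval: 1).plotMode = \bars }.defer;
};
)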
