tidying up

nix
Pierre Alexandre Tremblay 5 years ago
parent 360155f4c4
commit bbe715287e

@@ -61,7 +61,7 @@ FluidMLPRegressor : FluidRealTimeModel {
	predictPoint { |sourceBuffer, targetBuffer, action|
		actions[\predictPoint] = [nil,{action.value(targetBuffer)}];
-		this.predictPointMsg(sourceBuffer, targetBuffer).postln;
+		this.predictPointMsg(sourceBuffer, targetBuffer);
		this.prSendMsg(this.predictPointMsg(sourceBuffer, targetBuffer));
	}
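For context, predictPoint is FluidMLPRegressor's buffer-in/buffer-out prediction call, and the change above only drops a debug .postln of the outgoing message. A minimal, hedged usage sketch (dataset and buffer names are hypothetical; FluCoMa extensions and a booted server s assumed; evaluate line by line):

// ~inputDS and ~outputDS are hypothetical, already-filled FluidDataSets with matching point counts
~mlp = FluidMLPRegressor(s, [6], learnRate: 0.1, maxIter: 1000);
~mlp.fit(~inputDS, ~outputDS, action: { |loss| ("loss:" + loss).postln });
~inbuf = Buffer.loadCollection(s, [0.5, 0.5]);   // hypothetical 2-dimensional input point
~outbuf = Buffer.alloc(s, 1);                    // hypothetical 1-dimensional output point
~mlp.predictPoint(~inbuf, ~outbuf, action: { |buf| buf.getn(0, 1, { |x| ("prediction:" + x).postln }) });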

@@ -68,7 +68,6 @@ c.free
//for completion, here is just with rejection of outliers - not as good, but a decent second best!
FluidBufStats.process(s,~pitches, stats:~stats,outliersCutoff: 1.5)
~stats.getn(0,14,{|x|~pitchIQRStats = x;x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
-//now that is impressive!
c = {SinOsc.ar(~pitchIQRStats[0],mul: 0.05)}.play
b.play
c.free
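The round trip above depends on FluidBufStats' output layout: 7 statistics per input channel (mean, standard deviation, skewness, kurtosis, and the low/middle/high percentiles, i.e. min/median/max by default), hence the 14 values read back and reshaped to 7 by 2 for a pitch+confidence pair. A hedged sketch of the same round trip with the buffer allocation made explicit (~statsBuf stands in for ~stats; ~pitches is assumed to be a 2-channel FluidBufPitch analysis):

(
~statsBuf = Buffer.alloc(s, 7, 2);   // 7 statistics per input channel
FluidBufStats.process(s, ~pitches, stats: ~statsBuf, outliersCutoff: 1.5, action: {
	~statsBuf.getn(0, 14, { |vals|
		// rows: mean, stddev, skewness, kurtosis, low, mid, high; columns: pitch, confidence
		vals.reshape(7, 2).do { |row| "%\t%\n".postf(row[0].round(0.1), row[1].round(0.01)) };
	});
});
)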

@@ -4,7 +4,7 @@
//slightly oversegment with novelty
//segments should still make sense but might cut a few elements in 2 or 3
-~slicer = FluidSliceCorpus({ |src,start,num,dest| FluidBufNoveltySlice.kr(src,start,num,indices:dest, feature: 1, kernelSize: 29, threshold: 0.1, filterSize: 5, hopSize: 128)});
+~slicer = FluidSliceCorpus({ |src,start,num,dest| FluidBufNoveltySlice.kr(src,start,num,indices:dest, feature: 1, kernelSize: 29, threshold: 0.1, filterSize: 5, hopSize: 128, blocking: 1)});
~slicer.play(s, ~loader.buffer,~loader.index);
//test the segmentation by looping them
@@ -45,7 +45,11 @@ a = Slider(w, Rect(10, 20, 330, 20))
	writer = FluidDataSetWr.kr(~slicesShapes,label, -1, ~flatbuf[voice], Done.kr(flatten),blocking: 1);
});
)
-~extractor.play(s,~loader.buffer, ~slicer.index);
+(
+t = Main.elapsedTime;
+~extractor.play(s,~loader.buffer, ~slicer.index, action:{(Main.elapsedTime - t).postln;"Analysis done".postln});
+)
~slicesMFCC.print
~slicesShapes.print
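
The new wrapper above is a general pattern for timing these asynchronous analyses: capture Main.elapsedTime before launching and compare it inside the completion action. The same idea around a single hypothetical FluidBufMFCC pass (destination buffer name is an assumption; ~loader.buffer as above):

(
~mfccBuf = Buffer.new(s);   // hypothetical destination; resized by the process
~t0 = Main.elapsedTime;
FluidBufMFCC.process(s, ~loader.buffer, features: ~mfccBuf, action: {
	"MFCC analysis took % seconds\n".postf((Main.elapsedTime - ~t0).round(0.001));
});
)
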
@@ -102,22 +106,24 @@ a = Slider(w, Rect(10, 20, 330, 20))
	//copy the items to a subdataset from here
	winSize.do{|i|
		tempDict.put((i.asString), ~sliceDict["data"][(~orginalkeys[(i+head)]).asString]);//here one could curate which stats to take
-		"whichslices:%\n".postf(i+head);
+		// "whichslices:%\n".postf(i+head);
	};
	~windowDS.load(Dictionary.newFrom([\cols, ~sliceDict["cols"].asInteger, \data, tempDict]), action: {
-		"% - loaded\n".postf(head);
+		// "% - loaded\n".postf(head);
		//kmeans 2 and retrieve ordered array of class assignations
		~kmeans.fitPredict(~windowDS, ~windowLS, action: {|x|
			nbass = x;
-			"% - fitted1: ".postf(head); nbass.postln;
+			// "% - fitted1: ".postf(head); nbass.postln;
			if (nbass.includes(winSize.asFloat), {
				~kmeans.fitPredict(~windowDS, ~windowLS, {|x|
-					nbass = x; "% - fitted2: ".postf(head); nbass.postln;
+					nbass = x;
+					// "% - fitted2: ".postf(head); nbass.postln;
					if (nbass.includes(winSize.asFloat), {
						~kmeans.fitPredict(~windowDS, ~windowLS, {|x|
-							nbass = x; "% - fitted3: ".postf(head); nbass.postln;
+							nbass = x;
+							// "% - fitted3: ".postf(head); nbass.postln;
						});
					});
				});
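The nested retries above handle a FluidKMeans fit that converges with one empty cluster: the fitPredict action receives the cluster sizes, so a size equal to winSize means every point landed in a single class and the fit is rerun (here up to three times). The same guard can be written as a loop; a hedged sketch reusing the names above (winSize, ~kmeans, ~windowDS, ~windowLS, so it belongs inside the same block), with the retry limit as an assumption:

// sketch: refit 2-way k-means until neither cluster is empty, with a retry limit
~fitUntilSplit = { |tries = 5|
	~kmeans.fitPredict(~windowDS, ~windowLS, action: { |sizes|
		if (sizes.includes(winSize.asFloat) and: { tries > 0 }) {
			~fitUntilSplit.(tries - 1);   // one cluster took everything: fit again
		} {
			// sizes now describes a usable split (or the retry budget ran out)
		};
	});
};
~fitUntilSplit.(5);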

@@ -211,9 +211,7 @@ fork{
)
//Above we sneakily made a dictionary of slice data for playback (bufnum,start,end). Let's throw it in a dataset
-(
~slicedata = FluidDataSet(s); // will hold slice data (bufnum,start,end) for playback
-)
//dict -> dataset
(
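Incidentally, the "//dict -> dataset" step relies on FluidDataSet's load format: a Dictionary with a \cols count and a \data dictionary mapping string ids to value arrays, the same shape used with ~windowDS.load above. A minimal sketch with hypothetical slice data, three columns for bufnum, start and end:

// sketch: pack [bufnum, start, end] triplets into ~slicedata via load
(
~sliceInfo = Dictionary.newFrom([   // hypothetical slice data, keyed by slice id
	"0", [0, 0, 44100],
	"1", [0, 44100, 96000]
]);
~slicedata.load(
	Dictionary.newFrom([\cols, 3, \data, ~sliceInfo]),
	action: { ~slicedata.print }
);
)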

@@ -53,7 +53,7 @@ Routine{
		~simpleInput.addPoint((i+50).asString,b,{("Added Input" + (i+50)).postln});
		~simpleOutput.addLabel((i+50).asString,"Red",{("Added Output" + (i+50)).postln});
		s.sync;
-	}
+	};
	\done.postln;
}.play;
)

@@ -19,8 +19,8 @@ c = Buffer.alloc(s,1);
(
Routine{
	n.do{|i|
-		b.set(0,~idx[i].postln);
-		c.set(0,~data[i].postln);
+		b.set(0,~idx[i]);
+		c.set(0,~data[i]);
		~simpleInput.addPoint(i.asString,b,{("Added Input" + i).postln});
		~simpleOutput.addPoint(i.asString,c,{("Added Output" + i).postln});
		~mappingviz.set((~idx[i]/61.4).asInteger,~data[i]);
@@ -57,5 +57,3 @@ Routine{
// look at the interpolated values
~mappingresult.plot
-(31416/61.4).asInteger

@@ -113,7 +113,7 @@ Routine{
~normalize.transformPoint(~query_buf,~normbuf);
~standardize.transformPoint(~query_buf,~stdbuf);
-//query the single nearest neighbour via 3 different data scalings. Depending on the random source at the beginning, you will get small to large differences between the 3 answers!
+//query the single nearest neighbour via 3 different data scalings. Depending on the random source at the beginning, you should get (small or large) differences between the 3 answers!
[~tree,~normtree,~stdtree].do{|t| t.numNeighbours =1 };
~tree.kNearest(~query_buf, {|x| ("Original:" + x).post;~tree.kNearestDist(~query_buf, {|x| (" with a distance of " + x).postln});});
~normtree.kNearest(~normbuf, {|x| ("Normalized:" + x).post;~normtree.kNearestDist(~normbuf, {|x| (" with a distance of " + x).postln});});
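The three queries above presuppose three KDTrees fitted on the same data under three scalings: raw, normalized and standardized. A condensed, hedged sketch of that setup using the same object names (the source dataset ~ds is a hypothetical stand-in for whatever was analysed earlier):

// sketch: one KDTree per scaling of the same dataset
(
~normDS = FluidDataSet(s);
~stdDS = FluidDataSet(s);
~normalize = FluidNormalize(s);
~standardize = FluidStandardize(s);
~normalize.fitTransform(~ds, ~normDS);    // learn the scaling from ~ds and write the scaled copy
~standardize.fitTransform(~ds, ~stdDS);
~tree = FluidKDTree(s); ~normtree = FluidKDTree(s); ~stdtree = FluidKDTree(s);
~tree.fit(~ds); ~normtree.fit(~normDS); ~stdtree.fit(~stdDS);
)
// a query point must go through the matching scaling first, as in the transformPoint calls above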
