sorted parallel iterations thanks to @weefuzzy

nix
Pierre Alexandre Tremblay 6 years ago
parent ab3e0e1376
commit 14aec9832a

@@ -150,7 +150,6 @@ FluidProcessSlices{
 {
 	var numframes,feature;
 	numframes = v[\bounds].reverse.reduce('-');
-	jobID.postln;
 	feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k->v);
 	SendReply.kr(Done.kr(feature),'/doneFeature' ++ uid ++ idx);
 	FreeSelfWhenDone.kr(feature);
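For readers following this hunk: `v[\bounds]` holds a slice's `[startFrame, endFrame]` pair, so reversing it and folding with subtraction yields the slice length in frames. A quick worked example with made-up bounds:

// slice bounds as [startFrame, endFrame]
[4410, 13230].reverse;              // -> [13230, 4410]
[4410, 13230].reverse.reduce('-');  // -> 8820, i.e. end - start frames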

@@ -1,9 +1,10 @@
 // define a few processes
 (
 ~ds = FluidDataSet(s,\test); // still need a name on the server to make sure we do not forget it exists. it is now permanent aka will resist cmd+.
-~mfccbuf = Buffer.allocConsecutive(4,s,1);
-~statsbuf = Buffer.allocConsecutive(4,s,1);
-~flatbuf = Buffer.allocConsecutive(4,s,1);
+//define as many buffers as we have parallel voices/threads in the extractor processing (default is 4)
+~mfccbuf = 4.collect{Buffer.new};
+~statsbuf = 4.collect{Buffer.new};
+~flatbuf = 4.collect{Buffer.new};
 
 // here we instantiate a loader which creates a single large buffer with a dictionary of what was included in it
 // ~loader = FluidLoadFolder("/Volumes/machins/projets/newsfeed/smallnum/");
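A note on the buffer change above: `Buffer.new` only reserves a buffer number client-side, and the FluidBuf* processes (re)size their destination buffers when they run, so the old one-frame `allocConsecutive` allocation was presumably doing no useful work. A minimal sketch of the new idiom, assuming the default of four parallel voices:

~nVoices = 4; // assumed here to match FluidProcessSlices' default voice count
~mfccbuf = ~nVoices.collect { Buffer.new(s) };
~mfccbuf[0].bufnum.postln; // each voice owns an independent scratch buffer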
@@ -15,13 +16,14 @@
 
 });
 // here we instantiate a process of description and dataset writing, which will run each slice of the previous slice and write the entry. Note the chain of Done.kr triggers.
-~extractor = FluidProcessSlices({|src,start, num, idx, a, b, i|
-	var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf;
-	[src,start, num, idx, a, b, i].postln;
-	mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[i],trig:1);
-	stats = FluidBufStats.kr(~mfccbuf[i],stats:~statsbuf[i],trig:Done.kr(mfcc));
-	flatten = FluidBufFlatten.kr(~statsbuf[i],~flatbuf[i],trig:Done.kr(stats));
-	writer = FluidDataSetWr.kr(idx,~flatbuf[i],~ds,trig:Done.kr(flatten))
+~extractor = FluidProcessSlices({|src,start,num,data|
+	var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf, label, voice;
+	label = data.key;
+	voice = data.value[\voice];
+	mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[voice],trig:1);
+	stats = FluidBufStats.kr(~mfccbuf[voice],stats:~statsbuf[voice],trig:Done.kr(mfcc));
+	flatten = FluidBufFlatten.kr(~statsbuf[voice],~flatbuf[voice],trig:Done.kr(stats));
+	writer = FluidDataSetWr.kr(label,~flatbuf[voice],~ds,trig:Done.kr(flatten))
 });
 )
 
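The signature change is the heart of the commit: instead of the positional `idx, a, b, i` arguments, the function now receives a single `data` Association whose key is the slice's label and whose value carries per-slice fields, including which parallel voice is running the job. An illustrative shape, inferred from the `k->v` passed inside `FluidProcessSlices` in the first hunk (the label and any field beyond `\bounds` and `\voice` are hypothetical):

d = 'drumloop-slice-7' -> (bounds: [4410, 13230], voice: 2);
d.key;           // -> 'drumloop-slice-7', used as the dataset label
d.value[\voice]; // -> 2, selects ~mfccbuf[2] etc. for this job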
@@ -119,9 +121,8 @@ t = Main.elapsedTime;
 
 // write the dataset to file with the native JSON
 ~ds.write("/tmp/sc-dataset.json")
-~ds.clear
 
-// open the file in your favourite json editor
+// open the file in your default json editor
 "open /tmp/sc-dataset.json".unixCmd
 
 //////////////////////////////////////////////////////////////////////////
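Since the `~ds.clear` that immediately followed the write is gone, the dataset now survives in memory and the JSON on disk can serve as a checkpoint. A round-trip sketch, assuming `read` is the counterpart of the `write` used above and `\restored` is a placeholder name:

~ds2 = FluidDataSet(s, \restored); // hypothetical second dataset
~ds2.read("/tmp/sc-dataset.json");
~ds2.print; // post a summary to check the round trip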
