gross mistake corrected in example 12. TODO: sort out file boundary issues in the clustering

nix
Pierre Alexandre Tremblay 5 years ago
parent 2456ac961e
commit e0ee799f5f

@@ -51,6 +51,7 @@ t = Main.elapsedTime;
 ~extractor.play(s,~loader.buffer, ~slicer.index, action:{(Main.elapsedTime - t).postln;"Analysis done".postln});
 )
+~originalindices.size
 ~slicesMFCC.print
 ~slicesShapes.print
@@ -94,7 +95,7 @@ t = Main.elapsedTime;
 //retrieve the dataset as dictionary
 ~curated.dump{|x|~sliceDict = x;};
-~originalslicesarray = (~originalindices.flatten ++ ~loader.buffer.numFrames).asSet.asArray.sort
+~originalslicesarray = ~originalindices.flop[0] ++ ~loader.buffer.numFrames
 ~orginalkeys = Array.newFrom(~slicer.index.keys).sort{|a,b| ~slicer.index[a][\bounds][0]< ~slicer.index[b][\bounds][0]}
 //the windowed function, recursive to deal with sync dependencies
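For anyone tracing the corrected line above: assuming ~originalindices holds one [start, end] frame pair per slice (which the [\bounds] lookups nearby suggest), the sketch below shows how the two expressions differ; the pair values and the 176400-frame total length are invented stand-ins.

(
// hypothetical stand-in for ~originalindices: one [start, end] pair per slice, in frames,
// with a gap between the second and third slice
~demoIndices = [ [0, 44100], [44100, 88200], [99225, 132300] ];

// previous expression: starts and ends merged, de-duplicated and sorted -> 6 boundaries for 3 slices
(~demoIndices.flatten ++ 176400).asSet.asArray.sort.postln;

// corrected expression: one start frame per slice, plus the total length -> 4 boundaries for 3 slices
(~demoIndices.flop[0] ++ 176400).postln;
)

When slices share boundaries or leave gaps, the old expression can return more entries than there are slices, so it can fall out of step with the per-slice key array; the corrected form keeps exactly one entry per slice plus the end of the file.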
@@ -174,7 +175,7 @@ t = Main.elapsedTime;
 f = File.new("/tmp/clusteredslices-" ++ Date.getDate.stamp ++".rpp","w+");
 if (f.isOpen , {
-    var path, prevpath ="", sr, count, dur;
+    var path, prevpath ="", sr, count, dur, realDur;
     //write the header
     f.write("<REAPER_PROJECT 0.1 \"5.99/OSX64\" 1603037150\n\n");
@@ -204,17 +205,17 @@ if (f.isOpen , {
     f.write("<TRACK\nNAME \"clustered output\"\n");
     // iterate through the items in the track
     ~newkeys.do{|v, i|
+        dur = ~newindices[i+1] - ~newindices[i];
+        if (dur > 0, {
         path = ~slicer.index[v][\path];
         if (path != prevpath, {
             sr = ~slicer.index[v][\sr];
             prevpath = path;
             count = 0;
         });
-        dur = ~newindices[i+1] - ~newindices[i];
-        if (dur > 0, {
         f.write("<ITEM\nPOSITION " ++ (~newindices[i] / sr) ++ "\nLENGTH " ++ (dur / sr) ++ "\nNAME \"" ++ v ++ "\"\nSOFFS " ++ (count / sr) ++ "\n<SOURCE WAVE\nFILE \"" ++ path ++ "\"\n>\n>\n");
-        });
         count = count + dur;
+        });
     };
     //write the track footer
     f.write(">\n");
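To follow the POSITION / LENGTH / SOFFS arithmetic in the rewritten loop, here is a minimal dry run on invented values; it assumes a single source file, so the path and sample-rate switching is left out, and nothing is written to disk.

(
// hypothetical cluster output: slice boundaries in frames (last entry = total length) and one key per slice
~demoIndices = [ 0, 22050, 22050, 66150, 88200 ];
~demoKeys    = [ \a, \b, \c, \d ];
~demoSR      = 44100;
~demoCount   = 0;

~demoKeys.do{ |v, i|
    var dur = ~demoIndices[i+1] - ~demoIndices[i];
    if (dur > 0, {
        // the fields that would end up in one <ITEM ...> block of the .rpp file
        "item %: POSITION % LENGTH % SOFFS %".format(
            v, ~demoIndices[i] / ~demoSR, dur / ~demoSR, ~demoCount / ~demoSR
        ).postln;
        ~demoCount = ~demoCount + dur; // advance the source offset only for items actually written
    });
};
)

The zero-length second slice is skipped by the dur > 0 guard, so the running offset only advances for items that are actually written, mirroring the reordering in the hunk above.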
@@ -226,3 +227,4 @@ if (f.isOpen , {
 )
 (then open the time-stamped reaper file clusteredslices in the folder tmp)
+"open /tmp".unixCmd

@@ -86,11 +86,11 @@ fork{
     ~slices.put("data",Dictionary());
     //Collect each set of onsets into a language side array and store them in a dict
-    ~index_buffers.do{|b,i| // iterate over the 4 buffers
+    ~index_buffers.do{|b,i| // iterate over the input buffer array
        {
            b.loadToFloatArray( // load to language side array
                action:{|indices|
-                   //Glue the first and last samples of the buffer on to the index list, and place in dictionary wiht the
+                   //Glue the first and last samples of the buffer on to the index list, and place in dictionary with the
                    //Buffer object as a key
                    index_arrays.put(~audio_buffers[i], Array.newFrom([0] ++ indices ++ (~audio_buffers[i].numFrames - 1)));
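As a quick illustration of the gluing described in the comment above, with an invented onset list standing in for what loadToFloatArray would hand back and 120000 standing in for the buffer's numFrames:

(
// hypothetical onset frames as they might come back from loadToFloatArray
~demoOnsets    = FloatArray[ 12000.0, 53000.0, 91000.0 ];
~demoNumFrames = 120000;

// prepend the first sample of the buffer and append its last one, as in the action above
Array.newFrom([0] ++ ~demoOnsets ++ (~demoNumFrames - 1)).postln;
// -> [ 0, 12000.0, 53000.0, 91000.0, 119999 ]
)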
@@ -114,7 +114,6 @@ fork{
     index_arrays.keysValuesDo{|buffer, indices|
         indices.doAdjacentPairs{|start,end,num|
             var analysis = Routine({|counter|
-                FluidBufMelBands.processBlocking(
                    server:s,
                    source:buffer,
