clean up examples/guides folder

nix
Ted Moore 4 years ago
parent 54d1ea523a
commit d5011f5941

@ -1,285 +0,0 @@
/*
=================================================
| |
| LOAD AND ANALYZE THE SOURCE MATERIAL |
| |
=================================================
*/
(
// ============= 1. LOAD SOME FILES TO BE THE SOURCE MATERIAL ===================
// put your own folder path here! it's best if they're all mono for now.
~source_files_folder = "/Users/macprocomputer/Desktop/audio_files/";
~loader = FluidLoadFolder(~source_files_folder); // this is a nice helper class that will load a bunch of files from a folder.
~loader.play(s,{ // .play will cause it to *actually* do the loading
// we really just want access to the buffer. there is also a .index with some info about the files
// but we'll ignore that for now
~source_buf = ~loader.buffer;
"all files loaded".postln;
// double check that they're all mono: the buffer of the loaded files will have as many channels as the file with the most channels,
// so if this posts 1, then we know all the files were mono.
"num channels: %".format(~source_buf.numChannels).postln
});
)
~loader.buffer // check the buffer of concatenated source files
~loader.index.postln // post the index with some per-file info
// ~source_buf.plot
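// if you want to quickly audition what got loaded before slicing, something like this
// should work (it assumes the files really were mono, so ~source_buf has one channel):
(
{
PlayBuf.ar(1,~source_buf,BufRateScale.ir(~source_buf),doneAction:2).dup * -6.dbamp;
}.play;
)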
(
// ==================== 2. SLICE THE SOURCE MATERIAL ACCORDING TO SPECTRAL ONSETS =========================
~source_indices_buf = Buffer(s); // a buffer for writing the indices into
FluidBufOnsetSlice.process(s,~source_buf,indices:~source_indices_buf,metric:9,threshold:0.15,minSliceLength:9,action:{ // do the slicing
~source_indices_buf.loadToFloatArray(action:{
arg indices_array;
// post the results so that you can tweak the parameters and get what you want
"found % slices".format(indices_array.size-1).postln;
"average length: % seconds".format((~source_buf.duration / (indices_array.size-1)).round(0.001)).postln;
})
});
)
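// to help dial in the threshold and minSliceLength, you can audition an individual slice.
// this is just a sketch (it assumes ~source_buf is mono): change ~test_slice to hear others.
(
~test_slice = 0;
~source_indices_buf.loadToFloatArray(action:{
arg indices_array;
var start_frame = indices_array[~test_slice];
var dur_secs = (indices_array[~test_slice + 1] - start_frame) / ~source_buf.sampleRate;
{
var sig = PlayBuf.ar(1,~source_buf,BufRateScale.ir(~source_buf),0,start_frame,0,2);
var env = EnvGen.kr(Env([0,1,1,0],[0.03,dur_secs-0.06,0.03]),doneAction:2);
(sig * env).dup;
}.play;
});
)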
(
// =========================== 3. DEFINE A FUNCTION FOR DOING THE ANALYSIS ===================================
~analyze_to_dataset = {
arg audio_buffer, slices_buffer, action; // the audio buffer to analyze, a buffer with the slice points, and an action to execute when done
~nmfccs = 13;
Routine{
var features_buf = Buffer(s); // a buffer for writing the MFCC analyses into
var stats_buf = Buffer(s); // a buffer for writing the statistical summary of the MFCC analyses into
var flat_buf = Buffer(s); // a buffer for writing only the mean MFCC values into
var dataset = FluidDataSet(s); // the dataset that all of these analyses will be stored in
slices_buffer.loadToFloatArray(action:{ // get the indices from the server loaded down to the language
arg slices_array;
// iterate over each index in this array, paired with its next neighbor, so that we know where to start
// and stop the analysis
slices_array.doAdjacentPairs{
arg start_frame, end_frame, slice_index;
var num_frames = end_frame - start_frame;
"analyzing slice: % / %".format(slice_index + 1,slices_array.size - 1).postln;
// mfcc analysis; skip the 0th coefficient (startCoeff:1) because it mostly relates to loudness, and here we want to focus on timbre
FluidBufMFCC.process(s,audio_buffer,start_frame,num_frames,features:features_buf,startCoeff:1,numCoeffs:~nmfccs).wait;
// get a statistical summary of the MFCC analysis for this slice
FluidBufStats.process(s,features_buf,stats:stats_buf).wait;
// extract and flatten just the 0th frame (numFrames:1) of the statistical summary (because that is the mean)
FluidBufFlatten.process(s,stats_buf,numFrames:1,destination:flat_buf).wait;
// now that the means are extracted and flattened, we can add this datapoint to the dataset:
dataset.addPoint("slice-%".format(slice_index),flat_buf);
};
});
action.value(dataset); // execute the function and pass in the dataset that was created!
}.play;
};
)
(
// =================== 4. DO THE ANALYSIS =====================
~analyze_to_dataset.(~source_buf,~source_indices_buf,{ // pass in the audio buffer of the source, and the slice points
arg ds;
~source_dataset = ds; // set the ds to a global variable so we can access it later
~source_dataset.print;
});
)
/*
=================================================
| |
| LOAD AND ANALYZE THE TARGET |
| |
=================================================
*/
(
// ============= 5. LOAD THE FILE ===================
~target_path = File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Nicol-LoopE-M.wav";
~target_buf = Buffer.read(s,~target_path);
)
(
// ============= 6. SLICE ===================
~target_indices_buf = Buffer(s);
FluidBufOnsetSlice.process(s,~target_buf,indices:~target_indices_buf,metric:9,threshold:0.5,action:{
~target_indices_buf.loadToFloatArray(action:{
arg indices_array;
// post the results so that you can tweak the parameters and get what you want
"found % slices".format(indices_array.size-1).postln;
"average length: % seconds".format((~target_buf.duration / (indices_array.size-1)).round(0.001)).postln;
})
});
)
(
// =========== 7. USE THE SAME ANALYSIS FUNCTION
~analyze_to_dataset.(~target_buf,~target_indices_buf,{
arg ds;
~target_dataset = ds;
~target_dataset.print;
});
)
(
// ======================= 8. TEST DRUM LOOP PLAYBACK ====================
// play back the drum slices with a .wait in between so we hear the drum loop
Routine{
~target_indices_buf.loadToFloatArray(action:{
arg target_indices_array;
// prepend 0 (the start of the file) to the indices array
target_indices_array = [0] ++ target_indices_array;
// append the total number of frames to know how long to play the last slice for
target_indices_array = target_indices_array ++ [~target_buf.numFrames];
inf.do{ // loop for infinity
arg i;
// get the index to play by modulo one less than the number of slices (we don't want to *start* playing from the
// last slice point, because that's the end of the file!)
var index = i % (target_indices_array.size - 1);
// n.b. the minus one is so that the drum slice from the beginning of the file to the first index is called "-1"
// this is because that slice didn't actually get analyzed
var slice_id = index - 1;
var start_frame = target_indices_array[index];
var dur_frames = target_indices_array[index + 1] - start_frame;
var dur_secs = dur_frames / ~target_buf.sampleRate;
"playing slice: %".format(slice_id).postln;
{
var sig = PlayBuf.ar(1,~target_buf,BufRateScale.ir(~target_buf),0,start_frame,0,2);
var env = EnvGen.kr(Env([0,1,1,0],[0.03,dur_secs-0.06,0.03]),doneAction:2);
sig = sig * env; // multiplying by the env is optional, but keep the EnvGen line above: its doneAction frees the synth at the end of the slice!
sig.dup;
}.play;
dur_secs.wait;
};
});
}.play;
)
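// n.b. this routine loops forever; stop it with Cmd-. (or hold it in a variable,
// e.g. ~drum_loop_routine = Routine{...}.play, so you can call .stop on it later)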
/*
=================================================
| |
| KDTREE THE DATA AND DO THE LOOKUP |
| |
=================================================
*/
( // ========== 9. FIT THE KDTREE TO THE SOURCE DATASET SO THAT WE CAN QUICKLY LOOKUP NEIGHBORS ===============
Routine{
~kdtree = FluidKDTree(s,10);
s.sync;
~kdtree.fit(~source_dataset,{
"kdtree fit".postln;
});
}.play;
)
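// optional: MFCC dimensions can sit in quite different ranges, so neighbour matching
// sometimes improves if the data is standardized before fitting the tree. this is only a
// sketch, assuming FluidStandardize and a second dataset ~source_dataset_std; if you use it,
// remember to also pass each query buffer through ~standardizer.transformPoint before
// calling kNearest in step 11.
(
Routine{
~standardizer = FluidStandardize(s);
~source_dataset_std = FluidDataSet(s);
s.sync;
~standardizer.fitTransform(~source_dataset,~source_dataset_std,{
~kdtree.fit(~source_dataset_std,{"kdtree fit on standardized data".postln;});
});
}.play;
)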
(
// ========= 10. A LITTLE HELPER FUNCTION THAT WILL PLAY BACK A SLICE FROM THE SOURCE BY JUST PASSING THE INDEX =============
~play_source_index = {
arg index;
{
var start_frame = Index.kr(~source_indices_buf,index); // look up the start frame with the index *on the server* using Index.kr
var end_frame = Index.kr(~source_indices_buf,index+1); // same for the end frame
var num_frames = end_frame - start_frame;
var dur_secs = num_frames / SampleRate.ir(~source_buf);
var sig = PlayBuf.ar(1,~source_buf,BufRateScale.ir(~source_buf),0,start_frame,0,Done.freeSelf);
var env = EnvGen.kr(Env([0,1,1,0],[0.03,dur_secs-0.06,0.03]),doneAction:Done.freeSelf);
// sig = sig * env; // multiplying by the env is optional, but keep the EnvGen line above: its doneAction frees the synth at the end of the slice!
sig.dup;
}.play;
};
)
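// quick test of the helper: play the first analyzed slice of the source corpus
~play_source_index.(0);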
~target_dataset.print;
(
// ======================= 11. PLAY THE NEAREST SOURCE SLICE FOR EACH DRUM SLICE ====================
// step through the drum slices with a .wait in between, as in step 8 above, but now
// instead of just playing the slice of the drum loop, it gets the analysis of the drum
// loop's slice into "query_buf", then uses that info to look up the nearest neighbour
// in the source dataset and plays that slice
Routine{
var query_buf = Buffer.alloc(s,~nmfccs); // a buffer for doing the neighbor lookup with
~target_indices_buf.loadToFloatArray(action:{
arg target_indices_array;
// prepend 0 (the start of the file) to the indices array
target_indices_array = [0] ++ target_indices_array;
// append the total number of frames to know how long to play the last slice for
target_indices_array = target_indices_array ++ [~target_buf.numFrames];
inf.do{ // loop for infinity
arg i;
// get the index to play by modulo one less than the number of slices (we don't want to *start* playing from the
// last slice point, because that's the end of the file!)
var index = i % (target_indices_array.size - 1);
// n.b. the minus one is so that the drum slice from the beginning of the file to the first index is called "-1"
// this is because that slice didn't actually get analyzed
var slice_id = index - 1;
var start_frame = target_indices_array[index];
var dur_frames = target_indices_array[index + 1] - start_frame;
// this will be used to space out the source slices according to the target timings
var dur_secs = dur_frames / ~target_buf.sampleRate;
"target slice: %".format(slice_id).postln;
// as long as this slice is not the one that starts at the beginning of the file (-1) and
// not the slice at the end of the file (because neither of these have analyses), let's
// do the lookup
if((slice_id >= 0) && (slice_id < (target_indices_array.size - 3)),{
// use the slice id to (re)create the slice identifier and load the data point into "query_buf"
~target_dataset.getPoint("slice-%".format(slice_id.asInteger),query_buf,{
// once it's loaded, use that buffer as the input to lookup the nearest
// neighbour data point in the kdtree of source slices
~kdtree.kNearest(query_buf,{
arg nearest;
var nearest_index;
nearest.postln;
// peel off just the integer part of the slice to use in the helper function
nearest_index = nearest.choose.asString.split($-)[1].asInteger;
nearest_index.postln;
~play_source_index.(nearest_index);
});
});
});
// this plays the drum loop slice alongside the matched source slice (at -8 dB); comment out this function if you only want to hear the source slices
{
var sig = PlayBuf.ar(1,~target_buf,BufRateScale.ir(~target_buf),0,start_frame,0,2);
var env = EnvGen.kr(Env([0,1,1,0],[0.03,dur_secs-0.06,0.03]),doneAction:2);
// sig = sig * env; // multiplying by the env is optional, but keep the EnvGen line above: its doneAction frees the synth at the end of the slice!
sig.dup * -8.dbamp;
}.play;
dur_secs.wait;
};
});
}.play;
)

@ -1,80 +0,0 @@
(
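// ============ MLP REGRESSOR GUI ============
// the 2D slider (x,y) is the input space and the 10-value multislider is the output space
// (which also drives the synth below via ~y_buf). "Add Point" stores the current
// input/output pair in two datasets, "Train" fits the neural network on those pairs, and
// toggling to "Is Predicting" makes the 2D slider drive the multislider and synth through
// the trained network.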
~counter = 0;
~predicting = false;
~prediction_buf = Buffer.alloc(s,10);
Window.closeAll;
~win = Window("MLP Regressor",Rect(0,0,1000,400));
~multisliderview = MultiSliderView(~win,Rect(0,0,400,400))
.size_(10)
.elasticMode_(true)
.action_({
arg msv;
// ~synth.set(\val,msv.value);
// msv.value.postln;
~y_buf.setn(0,msv.value);
});
Slider2D(~win,Rect(400,0,400,400))
.action_({
arg s2d;
[s2d.x,s2d.y].postln;
~x_buf.setn(0,[s2d.x,s2d.y]);
if(~predicting,{
~nn.predictPoint(~x_buf,~y_buf,{
~y_buf.getn(0,10,{
arg prediction_values;
{~multisliderview.value_(prediction_values)}.defer;
});
});
});
});
Button(~win,Rect(800,0,200,20))
.states_([["Add Point"]])
.action_({
arg but;
var id = "example-%".format(~counter);
~ds_input.addPoint(id,~x_buf);
~ds_output.addPoint(id,~y_buf);
~counter = ~counter + 1;
~ds_input.print;
~ds_output.print;
});
Button(~win,Rect(800,20,200,20))
.states_([["Train"]])
.action_({
arg but;
~nn.fit(~ds_input,~ds_output,{
arg loss;
"loss: %".format(loss).postln;
});
});
Button(~win,Rect(800,40,200,20))
.states_([["Not Predicting",Color.yellow,Color.black],["Is Predicting",Color.green,Color.black]])
.action_({
arg but;
~predicting = but.value.asBoolean;
});
~win.front;
~ds_input = FluidDataSet(s);
~ds_output = FluidDataSet(s);
~x_buf = Buffer.alloc(s,2);
~y_buf = Buffer.alloc(s,10);
~nn = FluidMLPRegressor(s,[7],FluidMLPRegressor.sigmoid,FluidMLPRegressor.sigmoid,learnRate:0.1,batchSize:1,validation:0);
~synth = {
//arg val = #[0,0,0,0,0,0,0,0,0,0];
// read the 10 control values out of ~y_buf as a kr stream
var val = FluidBufToKr.kr(~y_buf);
var osc1, osc2, feed1, feed2, base1=69, base2=69, base3 = 130;
#feed2,feed1 = LocalIn.ar(2);
// two cross-coupled sine oscillators: each one is waveshaped, then filtered by a MoogFF
// whose cutoff falls as the loudness of the other oscillator's feedback signal rises
osc1 = MoogFF.ar(SinOsc.ar((((feed1 * val[0]) + val[1]) * base1).midicps,mul: (val[2] * 50).dbamp).atan,(base3 - (val[3] * (FluidLoudness.kr(feed2, 1, 0, hopSize: 64)[0].clip(-120,0) + 120))).lag(128/44100).midicps, val[4] * 3.5);
osc2 = MoogFF.ar(SinOsc.ar((((feed2 * val[5]) + val[6]) * base2).midicps,mul: (val[7] * 50).dbamp).atan,(base3 - (val[8] * (FluidLoudness.kr(feed1, 1, 0, hopSize: 64)[0].clip(-120,0) + 120))).lag(128/44100).midicps, val[9] * 3.5);
Out.ar(0,LeakDC.ar([osc1,osc2],mul: 0.1));
LocalOut.ar([osc1,osc2]);
}.play;
)
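// once the mapping sounds good, you can probably save the trained network and the example
// datasets to disk as JSON and reload them in a later session. this is only a sketch,
// assuming the .write / .read methods and paths of your own choosing:
(
~nn.write("~/Desktop/mlp_regressor.json".standardizePath);
~ds_input.write("~/Desktop/mlp_inputs.json".standardizePath);
~ds_output.write("~/Desktop/mlp_outputs.json".standardizePath);
)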