From db1bbe778096ee73f80cc6ddc08229f06ad4f3bc Mon Sep 17 00:00:00 2001 From: Pierre Alexandre Tremblay Date: Sun, 7 Jun 2020 15:55:02 +0100 Subject: [PATCH 01/12] i tried. and failed. please excuse me. --- .../Classes/FluidCorpusBuilders.sc | 7 +++--- .../dataset/demo-dataset-maker-utilities.scd | 25 ++++++++++--------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/release-packaging/Classes/FluidCorpusBuilders.sc b/release-packaging/Classes/FluidCorpusBuilders.sc index f5a5578..81d5c0c 100644 --- a/release-packaging/Classes/FluidCorpusBuilders.sc +++ b/release-packaging/Classes/FluidCorpusBuilders.sc @@ -135,7 +135,7 @@ FluidProcessSlices{ total = jobs.size; counter = 0; completed = 0; - perf = { + perf = {|jobID| var idx,v, k = jobs.pop; v = bufIdx[k]; counter = counter + 1; @@ -150,11 +150,12 @@ FluidProcessSlices{ { var numframes,feature; numframes = v[\bounds].reverse.reduce('-'); - feature = featureFunc.value(sourceBuffer, v[\bounds][0],numframes,k,v,counter-1); + jobID.postln; + feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k, v, counter-1, jobID); SendReply.kr(Done.kr(feature),'/doneFeature' ++ uid ++ idx); FreeSelfWhenDone.kr(feature); }.play(server); }; - 4.do{perf.value}; + 4.do{|jobIDs|perf.value(jobIDs)}; } } diff --git a/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd b/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd index 2fcdf8c..59a7fab 100644 --- a/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd +++ b/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd @@ -1,9 +1,9 @@ // define a few processes ( ~ds = FluidDataSet(s,\test); // still need a name on the server to make sure we do not forget it exists. it is now permanent aka will resist cmd+. -~mfccbuf = Buffer.new; -~statsbuf = Buffer.new; -~flatbuf = Buffer.new; +~mfccbuf = Buffer.allocConsecutive(4,s,1); +~statsbuf = Buffer.allocConsecutive(4,s,1); +~flatbuf = Buffer.allocConsecutive(4,s,1); // here we instantiate a loader which creates a single large buffer with a dictionary of what was included in it // ~loader = FluidLoadFolder("/Volumes/machins/projets/newsfeed/smallnum/"); @@ -15,12 +15,13 @@ }); // here we instantiate a process of description and dataset writing, which will run each slice of the previous slice and write the entry. Note the chain of Done.kr triggers. 
-~extractor = FluidProcessSlices({|src,start, num, idx| +~extractor = FluidProcessSlices({|src,start, num, idx, a, b, i| var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf; - mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf,trig:1); - stats = FluidBufStats.kr(~mfccbuf,stats:~statsbuf,trig:Done.kr(mfcc)); - flatten = FluidBufFlatten.kr(~statsbuf,~flatbuf,trig:Done.kr(stats)); - writer = FluidDataSetWr.kr(idx,~flatbuf,~ds,trig:Done.kr(flatten)) + [src,start, num, idx, a, b, i].postln; + mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[i],trig:1); + stats = FluidBufStats.kr(~mfccbuf[i],stats:~statsbuf[i],trig:Done.kr(mfcc)); + flatten = FluidBufFlatten.kr(~statsbuf[i],~flatbuf[i],trig:Done.kr(stats)); + writer = FluidDataSetWr.kr(idx,~flatbuf[i],~ds,trig:Done.kr(flatten)) }); ) @@ -140,15 +141,15 @@ FluidBufCompose.process(s,~loader.buffer,a,(b-a),numChans: 1, destination: ~targ ( { var mfcc, stats, flatten; - mfcc = FluidBufMFCC.kr(~targetsound,features:~mfccbuf,trig:1); - stats = FluidBufStats.kr(~mfccbuf,stats:~statsbuf,trig:Done.kr(mfcc)); - flatten = FluidBufFlatten.kr(~statsbuf,~flatbuf,trig:Done.kr(stats)); + mfcc = FluidBufMFCC.kr(~targetsound,features:~mfccbuf[0],trig:1); + stats = FluidBufStats.kr(~mfccbuf[0],stats:~statsbuf[0],trig:Done.kr(mfcc)); + flatten = FluidBufFlatten.kr(~statsbuf[0],~flatbuf[0],trig:Done.kr(stats)); }.play; ) //find its nearest neighbours ~friends = Array; -~tree.kNearest(~flatbuf,5,{|x| ~friends = x.postln;}) +~tree.kNearest(~flatbuf[0],5,{|x| ~friends = x.postln;}) // play them in a row ( From ab3e0e1376fa4aa54336675a33d1aba1ba4e501a Mon Sep 17 00:00:00 2001 From: Owen Green Date: Mon, 8 Jun 2020 10:17:23 +0100 Subject: [PATCH 02/12] Condense extra featureFunc args into assocation labe->dictionary; add 'tasks' argument to play Update help to reflect --- .../Classes/FluidCorpusBuilders.sc | 21 +++---- .../Classes/FluidProcessSlices.schelp | 56 +++++++++++++------ 2 files changed, 49 insertions(+), 28 deletions(-) diff --git a/release-packaging/Classes/FluidCorpusBuilders.sc b/release-packaging/Classes/FluidCorpusBuilders.sc index 81d5c0c..7313336 100644 --- a/release-packaging/Classes/FluidCorpusBuilders.sc +++ b/release-packaging/Classes/FluidCorpusBuilders.sc @@ -115,20 +115,18 @@ FluidSliceCorpus { } FluidProcessSlices{ - var < featureFunc, labelFunc; - var < index; + var < featureFunc; - *new { |featureFunc, labelFunc| - ^super.newCopyArgs(featureFunc,labelFunc); + *new { |featureFunc| + ^super.newCopyArgs(featureFunc); } - play{ |server,sourceBuffer,bufIdx, action| + play{ |server, sourceBuffer, bufIdx, action, tasks = 4| var counter,perf,jobs,total,uid, completed; sourceBuffer ?? {"No buffer to slice".error; ^nil}; bufIdx ?? {"No slice point dictionary passed".error;^nil}; server ?? {server = Server.default}; - index = IdentityDictionary(); uid = UniqueID.next; jobs = List.newFrom(bufIdx.keys); @@ -140,22 +138,25 @@ FluidProcessSlices{ v = bufIdx[k]; counter = counter + 1; idx = counter; + v[\index] = counter; + v[\voice] = jobID; OSCFunc({ completed = completed + 1; ("FluidProcessSlices:" + (completed.asString ++ "/" ++ total)).postln; - if(jobs.size > 0){perf.value}; - if(completed == total){action !? action.value(index);}; + if(jobs.size > 0){perf.value(jobID)}; + if(completed == total){action !? 
action.value(v);}; },"/doneFeature" ++ uid ++ counter,server.addr).oneShot; { var numframes,feature; numframes = v[\bounds].reverse.reduce('-'); jobID.postln; - feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k, v, counter-1, jobID); + feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k->v); SendReply.kr(Done.kr(feature),'/doneFeature' ++ uid ++ idx); FreeSelfWhenDone.kr(feature); }.play(server); }; - 4.do{|jobIDs|perf.value(jobIDs)}; + tasks ?? {tasks = 4}; + tasks.asInteger.min(jobs.size).do{|jobIDs|perf.value(jobIDs)}; } } diff --git a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp index fc4301a..2240113 100644 --- a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp +++ b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp @@ -6,7 +6,6 @@ related:: Classes/FluidLoadFolder, Classes/FluidSliceCorpus,Guides/FluidDecompos DESCRIPTION:: This class abstracts some of the boilerplate involved in batch processing a sequence of segments in a link::Classes/Buffer:: on the server. It does this by iteratively running a user supplied function and using slice point information passed as an link::Classes/IdentityDictionary:: (see link::Classes/FluidLoadFolder#-index:: for details on the format of this). - CLASSMETHODS:: METHOD:: new @@ -14,7 +13,11 @@ Creates a new instance ARGUMENT:: featureFunc ANCHOR::featureFunction:: -A function that will perform some processing on a section of a buffer. It is passed the following arguments +A function that will perform some processing on a section of a buffer. +warning:: +This function strong::must:: return a link::Classes/UGen:: that sets a code::done:: flag (see link::Classes/Done::), in order for the iteration and housekeeping to work. All code::FluidBuf*:: objects do this. +:: +The functions is passed the following arguments definitionlist:: ##src || The source link::Classes/Buffer:: containing the audio to process @@ -22,24 +25,35 @@ definitionlist:: || The start frame of the section to process, in samples ##num || The number of frames to process, in samples -##label -|| The label for the segment from the supplied dictionary to link::#-play:: +##data +|| anchor::datadict:: An link::Classes/Association:: of the label for this segment, with an link::Classes/IdentityDictionary:: of useful extra data: +definitionlist:: +## sr +|| The original sample rate of the segment +## numchans +|| The original channel count of the segment +## voice +|| By default link::#-play:: will run multiple jobs in parallel dependning on the link::#ntasks#tasks:: argument. This contains the task number, which allows you to maintain separate set of resources (e.g. temporary link::Classes/Buffer::s) for each task. +## index +|| The absolute count of slices processed. :: - -warning:: -This function strong::must:: return a link::Classes/UGen:: that sets a code::done:: flag (see link::Classes/Done::), in order for the iteration and housekeeping to work. All code::FluidBuf*:: objects do this. 
:: An example function that records statistics about the pitch of a segment in to a link::Classes/FluidDataSet:: could look like code:: -~avgPitch = { |src,start,num,label| - var pitch, stats,statsbuf; + +~featureBuffers = 4.do{Buffer.new}; + +~avgPitch = { |src,start,num,data| + var pitch, stats,statsbuf,label,voice; + label = data.key; + voice = data.value[\voice]; statsbuf = LocalBuf(7); - pitch = FluidBufPitch.kr(src,start,num,features:~someotherbuffer); - stats = FluidBufStats.kr(~someotherbuffer,stats:statsbuf,trig:Done.kr(pitch)); + pitch = FluidBufPitch.kr(src,start,num,features:~featurebuffers[voice]); + stats = FluidBufStats.kr(~featurebuffers[voice],stats:statsbuf,trig:Done.kr(pitch)); FluidDataSetWr.kr(label,statsbuf,~mydataset,Done.kr(stats)) -} +}; :: INSTANCEMETHODS:: @@ -57,7 +71,11 @@ ARGUMENT:: bufIdx An link::Classes/IdentityDictionary:: specifying labels, boundaries, sample rate and channel count for the segment. See link::Classes/FluidLoadFolder#-index:: for details. ARGUMENT:: action -A function to run when processing is complete +A function to run when processing is complete. This gets passed the same link::Classes/Association:: as link::#datadict#the processing function:: + +ARGUMENT:: tasks +ANCHOR::ntasks:: +The number of parallel processing tasks to run on the server. Default 4. This should probably never be greater than the number of available CPU cores. METHOD:: featureFunc Return the function uses by this instance. @@ -84,11 +102,13 @@ s.reboot; //In the interests of brevity, let's just take a subset of the slices and process these ~subset = IdentityDictionary.newFrom(~slicer.index.asSortedArray[0..3].flatten(1)); //write pitch statistics into a dataset -~extractor = FluidProcessSlices({|src,start,num,label,data,i| - var pitch, stats; - pitch = FluidBufPitch.kr(src,start,num,features:~pitchbufs[i]); - stats = FluidBufStats.kr(~pitchbufs[i],stats:~statsbufs[i],trig:Done.kr(pitch)); - FluidDataSetWr.kr(label,~statsbufs[i],~pitchdata,Done.kr(stats)) +~extractor = FluidProcessSlices({|src,start,num,data| + var pitch, stats, label,i; + i = data.value[\voice]; + label = data.key; + pitch = FluidBufPitch.kr(src,start,num,features:~pitchbufs[i]); + stats = FluidBufStats.kr(~pitchbufs[i],stats:~statsbufs[i],trig:Done.kr(pitch)); + FluidDataSetWr.kr(label,~statsbufs[i],~pitchdata,Done.kr(stats)) }); ~extractor.play(s,~loader.buffer,~subset,{"Feature extraction done".postln}); //view the data From 14aec9832ae7027f2fb84f4f39034dfbb8caf62b Mon Sep 17 00:00:00 2001 From: Pierre Alexandre Tremblay Date: Mon, 8 Jun 2020 10:57:28 +0100 Subject: [PATCH 03/12] sorted parallel iterations thanks to @weefuzzy --- .../Classes/FluidCorpusBuilders.sc | 1 - .../dataset/demo-dataset-maker-utilities.scd | 25 ++++++++++--------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/release-packaging/Classes/FluidCorpusBuilders.sc b/release-packaging/Classes/FluidCorpusBuilders.sc index 7313336..30f128d 100644 --- a/release-packaging/Classes/FluidCorpusBuilders.sc +++ b/release-packaging/Classes/FluidCorpusBuilders.sc @@ -150,7 +150,6 @@ FluidProcessSlices{ { var numframes,feature; numframes = v[\bounds].reverse.reduce('-'); - jobID.postln; feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k->v); SendReply.kr(Done.kr(feature),'/doneFeature' ++ uid ++ idx); FreeSelfWhenDone.kr(feature); diff --git a/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd b/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd index 59a7fab..3e03c5e 
100644 --- a/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd +++ b/release-packaging/Examples/dataset/demo-dataset-maker-utilities.scd @@ -1,9 +1,10 @@ // define a few processes ( ~ds = FluidDataSet(s,\test); // still need a name on the server to make sure we do not forget it exists. it is now permanent aka will resist cmd+. -~mfccbuf = Buffer.allocConsecutive(4,s,1); -~statsbuf = Buffer.allocConsecutive(4,s,1); -~flatbuf = Buffer.allocConsecutive(4,s,1); +//define as many buffers as we have parallel voices/threads in the extractor processing (default is 4) +~mfccbuf = 4.collect{Buffer.new}; +~statsbuf = 4.collect{Buffer.new}; +~flatbuf = 4.collect{Buffer.new}; // here we instantiate a loader which creates a single large buffer with a dictionary of what was included in it // ~loader = FluidLoadFolder("/Volumes/machins/projets/newsfeed/smallnum/"); @@ -15,13 +16,14 @@ }); // here we instantiate a process of description and dataset writing, which will run each slice of the previous slice and write the entry. Note the chain of Done.kr triggers. -~extractor = FluidProcessSlices({|src,start, num, idx, a, b, i| - var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf; - [src,start, num, idx, a, b, i].postln; - mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[i],trig:1); - stats = FluidBufStats.kr(~mfccbuf[i],stats:~statsbuf[i],trig:Done.kr(mfcc)); - flatten = FluidBufFlatten.kr(~statsbuf[i],~flatbuf[i],trig:Done.kr(stats)); - writer = FluidDataSetWr.kr(idx,~flatbuf[i],~ds,trig:Done.kr(flatten)) +~extractor = FluidProcessSlices({|src,start,num,data| + var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf, label, voice; + label = data.key; + voice = data.value[\voice]; + mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[voice],trig:1); + stats = FluidBufStats.kr(~mfccbuf[voice],stats:~statsbuf[voice],trig:Done.kr(mfcc)); + flatten = FluidBufFlatten.kr(~statsbuf[voice],~flatbuf[voice],trig:Done.kr(stats)); + writer = FluidDataSetWr.kr(label,~flatbuf[voice],~ds,trig:Done.kr(flatten)) }); ) @@ -119,9 +121,8 @@ t = Main.elapsedTime; // write the dataset to file with the native JSON ~ds.write("/tmp/sc-dataset.json") -~ds.clear -// open the file in your favourite json editor +// open the file in your default json editor "open /tmp/sc-dataset.json".unixCmd ////////////////////////////////////////////////////////////////////////// From fb64db6d50477154dabc2fea3de1098b7547e592 Mon Sep 17 00:00:00 2001 From: Pierre Alexandre Tremblay Date: Mon, 8 Jun 2020 11:22:49 +0100 Subject: [PATCH 04/12] typo corrections in new helpfiles --- .../HelpSource/Classes/FluidBufFlatten.schelp | 3 ++- .../HelpSource/Classes/FluidDataSetWr.schelp | 2 ++ .../HelpSource/Classes/FluidProcessSlices.schelp | 13 ++++++++++--- .../HelpSource/Classes/FluidSliceCorpus.schelp | 8 +++++--- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/release-packaging/HelpSource/Classes/FluidBufFlatten.schelp b/release-packaging/HelpSource/Classes/FluidBufFlatten.schelp index a70f609..1b744b0 100644 --- a/release-packaging/HelpSource/Classes/FluidBufFlatten.schelp +++ b/release-packaging/HelpSource/Classes/FluidBufFlatten.schelp @@ -82,10 +82,11 @@ b = Buffer.read(s,~randomsoundfile.path,action:{"Sound Loaded".postln}); FluidBufPitch.process(s,b,numFrames:512 * 10,numChans:1,features:~pitchdata,action:{"Pitch Analysis Done".postln}); // Flatten and print the flat buffer. 
We expect to see larger numbers (20-2000) interleaved with smaller (0-1) +( FluidBufFlatten.process(s,~pitchdata,~flatdata,action:{ ~flatdata.loadToFloatArray(action:{ |a| a.postln; }) }) - +) :: \ No newline at end of file diff --git a/release-packaging/HelpSource/Classes/FluidDataSetWr.schelp b/release-packaging/HelpSource/Classes/FluidDataSetWr.schelp index b066740..7ec2a64 100644 --- a/release-packaging/HelpSource/Classes/FluidDataSetWr.schelp +++ b/release-packaging/HelpSource/Classes/FluidDataSetWr.schelp @@ -33,10 +33,12 @@ s.reboot; ~ds = FluidDataSet(s,\FluidDataSetWr); ) +( { var b = LocalBuf.newFrom([0,1,2,3]); FreeSelfWhenDone.kr(FluidDataSetWr.kr("help_data_point",b,~ds)); }.play(s); +) ~ds.print; diff --git a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp index 2240113..98f89c1 100644 --- a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp +++ b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp @@ -49,7 +49,7 @@ code:: var pitch, stats,statsbuf,label,voice; label = data.key; voice = data.value[\voice]; - statsbuf = LocalBuf(7); + statsbuf = LocalBuf(7,4); pitch = FluidBufPitch.kr(src,start,num,features:~featurebuffers[voice]); stats = FluidBufStats.kr(~featurebuffers[voice],stats:statsbuf,trig:Done.kr(pitch)); FluidDataSetWr.kr(label,statsbuf,~mydataset,Done.kr(stats)) @@ -84,7 +84,8 @@ EXAMPLES:: code:: s.reboot; -//Load all the Fluid Corpus Manipulation audio f +//Load all the Fluid Corpus Manipulation audio files + ( ~path = File.realpath(FluidLoadFolder.class.filenameSymbol).dirname +/+ "../AudioFiles"; ~loader = FluidLoadFolder(~path); @@ -96,12 +97,15 @@ s.reboot; ~pitchbufs = 4.collect{Buffer.new}; ~statsbufs = 4.collect{Buffer.new}; ) + //segment ~slicer.play(s,~loader.buffer,~loader.index,{|dataDictionary| "Slicing done".postln;}); //In the interests of brevity, let's just take a subset of the slices and process these -~subset = IdentityDictionary.newFrom(~slicer.index.asSortedArray[0..3].flatten(1)); +~subset = IdentityDictionary.newFrom(~slicer.index.asSortedArray[0..7].flatten(1)); + //write pitch statistics into a dataset +( ~extractor = FluidProcessSlices({|src,start,num,data| var pitch, stats, label,i; i = data.value[\voice]; @@ -110,7 +114,10 @@ s.reboot; stats = FluidBufStats.kr(~pitchbufs[i],stats:~statsbufs[i],trig:Done.kr(pitch)); FluidDataSetWr.kr(label,~statsbufs[i],~pitchdata,Done.kr(stats)) }); +) + ~extractor.play(s,~loader.buffer,~subset,{"Feature extraction done".postln}); + //view the data ~pitchdata.print :: diff --git a/release-packaging/HelpSource/Classes/FluidSliceCorpus.schelp b/release-packaging/HelpSource/Classes/FluidSliceCorpus.schelp index c69722b..4c1783c 100644 --- a/release-packaging/HelpSource/Classes/FluidSliceCorpus.schelp +++ b/release-packaging/HelpSource/Classes/FluidSliceCorpus.schelp @@ -6,7 +6,7 @@ related:: Classes/FluidLoadFolder, Classes/FluidProcessSlices, Classes/FluidBufO DESCRIPTION:: A utility class that abstracts the boiler plate code involved with batch slicing a buffer containing distinct chunks of audio (a 'corpus' for these purposes). -Whilst this class is designed to be used most easily in conjunction with link::Classes/FluidLoadFolder::, it doesn't have to be. However, it does excpect to be passed an link::Classes/IdentityDictionary:: of a particular format (see link::#indexFormat#description below::). 
+Whilst this class is designed to be used most easily in conjunction with link::Classes/FluidLoadFolder::, it doesn't have to be. However, it does expect to be passed an link::Classes/IdentityDictionary:: of a particular format (see link::#indexFormat#description below::). The actual mechanism for doing the slicing is provided by the user, in the form of a function that will form part of a larger link::Classes/Synth:: (see link::#sliceFuncDescription#below::). @@ -88,10 +88,13 @@ s.reboot ~loader.play(s,action:{ |dataDictionary| "Done loading".postln}); ) +( ~slicer = FluidSliceCorpus({ |src,start,num,dest| FluidBufOnsetSlice.kr(src,start,num,indices:dest, threshold:0.7) }); +) +( ~slicer.play(s,~loader.buffer,~loader.index,{|dataDictionary| "Slicing done".postln; //we get passed an IdentityDictionary of slice data, let's look at it @@ -102,6 +105,5 @@ s.reboot ')'.postln; } }); - - +) :: From 0d32688d29f9b9a6d746dc438171abc9844d32f2 Mon Sep 17 00:00:00 2001 From: Owen Green Date: Mon, 8 Jun 2020 12:14:27 +0100 Subject: [PATCH 05/12] Simplify example fuction by strapping numChans to 1 --- .../HelpSource/Classes/FluidProcessSlices.schelp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp index 98f89c1..a764c05 100644 --- a/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp +++ b/release-packaging/HelpSource/Classes/FluidProcessSlices.schelp @@ -49,9 +49,9 @@ code:: var pitch, stats,statsbuf,label,voice; label = data.key; voice = data.value[\voice]; - statsbuf = LocalBuf(7,4); - pitch = FluidBufPitch.kr(src,start,num,features:~featurebuffers[voice]); - stats = FluidBufStats.kr(~featurebuffers[voice],stats:statsbuf,trig:Done.kr(pitch)); + statsbuf = LocalBuf(7); + pitch = FluidBufPitch.kr(src,start,num,numChans:1,features:~featurebuffers[voice]); + stats = FluidBufStats.kr(~featurebuffers[voice],numChans:1, stats:statsbuf,trig:Done.kr(pitch)); FluidDataSetWr.kr(label,statsbuf,~mydataset,Done.kr(stats)) }; :: From c1f7725d71748d9c3807f0d8ed607adf78f4f31b Mon Sep 17 00:00:00 2001 From: Gerard Date: Mon, 8 Jun 2020 20:32:48 +0100 Subject: [PATCH 06/12] FluidManipulationJSON: parse JSON manually to avoid change of types --- .../Classes/FluidManipulationJSON.sc | 57 ++++++++++++++----- 1 file changed, 42 insertions(+), 15 deletions(-) diff --git a/release-packaging/Classes/FluidManipulationJSON.sc b/release-packaging/Classes/FluidManipulationJSON.sc index 216e577..780d731 100644 --- a/release-packaging/Classes/FluidManipulationJSON.sc +++ b/release-packaging/Classes/FluidManipulationJSON.sc @@ -1,40 +1,67 @@ + FluidManipulationClient { tmpJSONFilename{ - ^Platform.defaultTempDir++"tmp_fluid_dataset_"++ - Date.localtime.stamp++".json"; + ^Platform.defaultTempDir++"tmp_fluid_data_"++ + Date.localtime.stamp++"_"++UniqueID.next++".json"; } dump {|action| var filename = this.tmpJSONFilename; action ?? 
{action = postit}; - this.write(filename, { - action.value(filename.parseYAMLFile); - File.delete(filename); - }); + this.write(filename, { + action.value(this.parseJSON(File.readAllString(filename))); + File.delete(filename); + }); } load{|dict, action| var filename = this.tmpJSONFilename; - var str = this.asJSON(dict); - File.use(filename, "w", { |f| f.write(this.asJSON(dict));}); + File.use(filename, "wt", { |f| f.write(this.asJSON(dict));}); this.read(filename, { - action.notNil.if{ action.value }; - File.delete(filename); + action.notNil.if{ action.value; }; + File.delete(filename); + }); + } + + toDict{|obj| + var converted; + if(obj.class === Event){ + converted = obj.as(Dictionary); + converted.keysValuesChange{|k,v|this.toDict(v)} + ^converted; + }; + if(obj.class === Array){ + converted = obj.collect{|v| this.toDict(v)}; + ^converted; + }; + ^obj; + } + + parseJSON{|jsonStr| + var parsed = jsonStr; + jsonStr.do({|char,pos| + var inString = false; + char.switch( + $",{(jsonStr[pos-1]==$\ && inString).not.if({inString = inString.not})}, + ${,{ if(inString.not){parsed[pos] = $(} }, + $},{ if(inString.not){parsed[pos] = $)} } + ) }); + ^this.toDict(parsed.interpret); } asJSON{|d| - if(d.isString || d.isNumber){^d}; - if(d.isKindOf(Dictionary), + if(d.isNumber){^d}; + if(d.isString){^d.asCompileString}; + if(d.isKindOf(Dictionary)) { ^"{" ++ ( d.keys.asList.collect{|k| k.asString.asCompileString ++ ":" + this.asJSON(d[k]) }).join(", ") ++ "}" - }); - if(d.isKindOf(SequenceableCollection), + }; + if(d.isKindOf(SequenceableCollection)) { ^"[" ++ d.collect({|x|this.asJSON(x)}).join(", ")++ "]"; - }); + }; } } From 253339aac2dbd17737f249044db621dc9d0c490b Mon Sep 17 00:00:00 2001 From: Owen Green Date: Mon, 8 Jun 2020 22:01:34 +0100 Subject: [PATCH 07/12] Harden message dispatch against cmd+. 
by using more safely shared state --- include/FluidSCWrapper.hpp | 47 ++++++++++++++++++++++---------------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/include/FluidSCWrapper.hpp b/include/FluidSCWrapper.hpp index 1c52928..8323503 100644 --- a/include/FluidSCWrapper.hpp +++ b/include/FluidSCWrapper.hpp @@ -847,6 +847,7 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase using FloatControlsIter = impl::FloatControlsIter; + using SharedState = std::shared_ptr>; //I would like to template these to something more scaleable, but baby steps friend class impl::RealTime; friend class impl::NonRealTime; @@ -1185,7 +1186,7 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase struct MessageDispatch { static constexpr size_t Message = N; - FluidSCWrapper* wrapper; + SharedState state; ArgTuple args; Ret result; std::string name; @@ -1226,7 +1227,7 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase void* msgptr = ft->fRTAlloc(x->mWorld, sizeof(MessageData)); MessageData* msg = new (msgptr) MessageData; msg->name = '/' + Client::getMessageDescriptors().template name(); - msg->wrapper = x; + msg->state = x->state(); ArgTuple& args = msg->args; // type check OSC message @@ -1306,10 +1307,10 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase { MessageData* m = static_cast(data); m->result = - ReturnType{invokeImpl(m->wrapper, m->args, IndexList{})}; + ReturnType{invokeImpl(m->state, m->args, IndexList{})}; if (!m->result.ok()) - printResult(m->wrapper, m->result); + printResult(m->state, m->result); return true; }, @@ -1321,11 +1322,12 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase world); if(m->result.status() != Result::Status::kError) - messageOutput(m->wrapper, m->name, m->result); + messageOutput(m->state, m->name, m->result); else { - auto ft = getInterfaceTable(); - ft->fSendNodeReply(&m->wrapper->mParent->mNode, + auto ft = getInterfaceTable(); + if(m->state->mNodeAlive) + ft->fSendNodeReply(m->state->mNode, -1, m->name.c_str(),0, nullptr); } return true; @@ -1341,54 +1343,59 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase } template // Call from NRT - static decltype(auto) invokeImpl(FluidSCWrapper* x, ArgsTuple& args, + static decltype(auto) invokeImpl(SharedState& x, ArgsTuple& args, std::index_sequence) { - return x->client().template invoke(x->client(), std::get(args)...); + return x->client.template invoke(x->client, std::get(args)...); } template // call from RT - static void messageOutput(FluidSCWrapper* x, const std::string& s, + static void messageOutput(SharedState& x, const std::string& s, MessageResult& result) { auto ft = getInterfaceTable(); // allocate return values index numArgs = ToFloatArray::allocSize(static_cast(result)); + if(!x->mNodeAlive) return; + float* values = static_cast( - ft->fRTAlloc(x->mWorld, asUnsigned(numArgs) * sizeof(float))); + ft->fRTAlloc(x->mNode->mWorld, asUnsigned(numArgs) * sizeof(float))); // copy return data ToFloatArray::convert(values, static_cast(result)); - ft->fSendNodeReply(&x->mParent->mNode, -1, s.c_str(), + ft->fSendNodeReply(x->mNode, -1, s.c_str(), static_cast(numArgs), values); } - static void messageOutput(FluidSCWrapper* x, const std::string& s, + static void messageOutput(SharedState& x, const std::string& s, MessageResult&) { auto ft = getInterfaceTable(); - ft->fSendNodeReply(&x->mParent->mNode, -1, s.c_str(), 0, nullptr); + if(!x->mNodeAlive) return; + ft->fSendNodeReply(x->mNode, -1, s.c_str(), 0, nullptr); } template - static void messageOutput(FluidSCWrapper* x, 
const std::string& s, + static void messageOutput(SharedState& x, const std::string& s, MessageResult>& result) { auto ft = getInterfaceTable(); std::array offsets; index numArgs; + if(!x->mNodeAlive) return; + std::tie(offsets, numArgs) = ToFloatArray::allocSize(static_cast>(result)); float* values = static_cast( - ft->fRTAlloc(x->mWorld, asUnsigned(numArgs) * sizeof(float))); + ft->fRTAlloc(x->mNode->mWorld, asUnsigned(numArgs) * sizeof(float))); ToFloatArray::convert(values, std::tuple(result), offsets, std::index_sequence_for()); - ft->fSendNodeReply(&x->mParent->mNode, -1, s.c_str(), + ft->fSendNodeReply(x->mNode, -1, s.c_str(), static_cast(numArgs), values); } @@ -1464,14 +1471,14 @@ public: return p; } - static void printResult(FluidSCWrapper* x, Result& r) + static void printResult(SharedState& x, Result& r) { - if (!x) return; + if (!x.get() || !x->mNodeAlive) return; switch (r.status()) { case Result::Status::kWarning: { - if (x->mWorld->mVerbosity > 0) + if (x->mNode->mWorld->mVerbosity > 0) std::cout << "WARNING: " << r.message().c_str() << '\n'; break; } From 7dc44c738ee40d3f608c69a38550c77def440210 Mon Sep 17 00:00:00 2001 From: Owen Green Date: Mon, 8 Jun 2020 22:34:24 +0100 Subject: [PATCH 08/12] Don't try and send message responses > 8kb --- include/FluidSCWrapper.hpp | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/include/FluidSCWrapper.hpp b/include/FluidSCWrapper.hpp index 8323503..80d88ff 100644 --- a/include/FluidSCWrapper.hpp +++ b/include/FluidSCWrapper.hpp @@ -1354,10 +1354,16 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase MessageResult& result) { auto ft = getInterfaceTable(); + if(!x->mNodeAlive) return; + // allocate return values index numArgs = ToFloatArray::allocSize(static_cast(result)); - - if(!x->mNodeAlive) return; + + if(numArgs > 2048) + { + std::cout << "ERROR: Message response too big to send (" << asUnsigned(numArgs) * sizeof(float) << " bytes)." << std::endl; + return; + } float* values = static_cast( ft->fRTAlloc(x->mNode->mWorld, asUnsigned(numArgs) * sizeof(float))); @@ -1389,6 +1395,12 @@ class FluidSCWrapper : public impl::FluidSCWrapperBase std::tie(offsets, numArgs) = ToFloatArray::allocSize(static_cast>(result)); + if(numArgs > 2048) + { + std::cout << "ERROR: Message response too big to send (" << asUnsigned(numArgs) * sizeof(float) << " bytes)." << std::endl; + return; + } + float* values = static_cast( ft->fRTAlloc(x->mNode->mWorld, asUnsigned(numArgs) * sizeof(float))); From 1279678eea9fd6df03c0346c9cbc7c2fba1c5b45 Mon Sep 17 00:00:00 2001 From: Owen Green Date: Mon, 8 Jun 2020 22:35:19 +0100 Subject: [PATCH 09/12] FluidDataSet remove dump (now in parent class) --- release-packaging/Classes/FluidDataSet.sc | 5 ----- 1 file changed, 5 deletions(-) diff --git a/release-packaging/Classes/FluidDataSet.sc b/release-packaging/Classes/FluidDataSet.sc index b0253a5..8d2d845 100644 --- a/release-packaging/Classes/FluidDataSet.sc +++ b/release-packaging/Classes/FluidDataSet.sc @@ -90,11 +90,6 @@ FluidDataSet : FluidManipulationClient { this.prSendMsg(\print,[],action,[string(FluidMessageResponse,_,_)]); } - dump { |action| - action ?? 
{action = postit}; - this.prSendMsg(\dump,[],action,[string(FluidMessageResponse,_,_)]); - } - free { serverCaches.remove(server,id); super.free; From dc2fecef799ca9394d362ead6ff79762e07bd7b5 Mon Sep 17 00:00:00 2001 From: Owen Green Date: Tue, 9 Jun 2020 11:08:31 +0100 Subject: [PATCH 10/12] Fix DataSet help to take Dictionary in dump action, not String --- release-packaging/HelpSource/Classes/FluidDataSet.schelp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/release-packaging/HelpSource/Classes/FluidDataSet.schelp b/release-packaging/HelpSource/Classes/FluidDataSet.schelp index ec85964..1359c91 100644 --- a/release-packaging/HelpSource/Classes/FluidDataSet.schelp +++ b/release-packaging/HelpSource/Classes/FluidDataSet.schelp @@ -156,11 +156,7 @@ Routine{ ~ds.print //to post window by default, but you can supply a custom action instead ~ds.dump //likewise //for example -( -~ds.dump{|j| - ~dict = j.parseJSON -} -) +~ds.dump{ |d| ~dict = d } //Now we have a Dictionary of our data and IDs ~dict.postcs From 18d5b35e1e9696fb5ed19aaa935fb58e81d440d8 Mon Sep 17 00:00:00 2001 From: Gerard Date: Tue, 9 Jun 2020 11:58:35 +0100 Subject: [PATCH 11/12] read and write for KNNClassifier/Regressor --- release-packaging/Classes/FluidKNNClassifier.sc | 9 +++++++++ release-packaging/Classes/FluidKNNRegressor.sc | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/release-packaging/Classes/FluidKNNClassifier.sc b/release-packaging/Classes/FluidKNNClassifier.sc index 1314812..3fb9e47 100644 --- a/release-packaging/Classes/FluidKNNClassifier.sc +++ b/release-packaging/Classes/FluidKNNClassifier.sc @@ -25,4 +25,13 @@ FluidKNNClassifier : FluidManipulationClient { [string(FluidMessageResponse,_,_)] ); } + + read{|filename,action| + this.prSendMsg(\read,[filename.asString],action); + } + + write{|filename,action| + this.prSendMsg(\write,[filename.asString],action); + } + } \ No newline at end of file diff --git a/release-packaging/Classes/FluidKNNRegressor.sc b/release-packaging/Classes/FluidKNNRegressor.sc index 74c0023..46916b1 100644 --- a/release-packaging/Classes/FluidKNNRegressor.sc +++ b/release-packaging/Classes/FluidKNNRegressor.sc @@ -26,4 +26,13 @@ FluidKNNRegressor : FluidManipulationClient { this.prSendMsg(\predictPoint, [buffer.asUGenInput, k,uniform], action, [number(FluidMessageResponse,_,_)]); } + + read{|filename,action| + this.prSendMsg(\read,[filename.asString],action); + } + + write{|filename,action| + this.prSendMsg(\write,[filename.asString],action); + } + } From 2cf8b8cceda516c613e6b0e047800c7b00fea41c Mon Sep 17 00:00:00 2001 From: Pierre Alexandre Tremblay Date: Tue, 9 Jun 2020 16:01:50 +0100 Subject: [PATCH 12/12] added a learning example of dictionary -> dataset / labelset --- .../super-simple-dictionary-json-example.scd | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 release-packaging/Examples/dataset/super-simple-dictionary-json-example.scd diff --git a/release-packaging/Examples/dataset/super-simple-dictionary-json-example.scd b/release-packaging/Examples/dataset/super-simple-dictionary-json-example.scd new file mode 100644 index 0000000..c1d33d4 --- /dev/null +++ b/release-packaging/Examples/dataset/super-simple-dictionary-json-example.scd @@ -0,0 +1,45 @@ +// create the data dictionary +~data = Dictionary.new +7.do{|i| ~data.add(("entry-"++i).asSymbol -> 10.collect{|j|j/10 + i})} + +// nest that dictionary in the dataset format, adding the number of columns +~dict = Dictionary.new +~dict.add(\data -> ~data) 
+~dict.add(\cols -> 10)
+
+//create a dataset, then load the dictionary
+~ds = FluidDataSet.new(s,\simple1data);
+~ds.load(~dict)
+~ds.print
+
+//fun with kdtree to see it actually works
+~kdtree = FluidKDTree.new(s)
+~kdtree.fit(~ds,{\done.postln;})
+
+~target = Buffer.loadCollection(s,(4).dup(10));
+~kdtree.kNearest(~target,5,{|a|a.postln;})
+~kdtree.kNearestDist(~target,5,{|a|a.postln;})
+
+
+/////////////////////////////////////////////
+// creating a labelset the same way
+
+// creating the data dictionary
+~data2 = Dictionary.new
+7.do{|i| ~data2.add(("entry-"++i).asSymbol -> (if( i.odd, {["odd"]},{["even"]})))}
+
+// nesting again
+~dict2 = Dictionary.new
+~dict2.add(\data -> ~data2)
+~dict2.add(\cols -> 1)
+
+// creating a labelset and loading the dictionary
+~ls = FluidLabelSet.new(s,\simplelabel);
+~ls.load(~dict2)
+~ls.print
+
+// testing with a classifier toy example
+~classifier = FluidKNNClassifier.new(s);
+~classifier.fit(~ds,~ls, {\done.postln;})
+
+~classifier.predictPoint(~target,2,action: {|x|x.postln;})
\ No newline at end of file
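
The read and write messages added to FluidKNNClassifier and FluidKNNRegressor in PATCH 11/12 are not exercised by any example in this series. The lines below are a minimal sketch of how they could round-trip the fitted classifier from the example above; the file path and the post-reload prediction are illustrative assumptions, not part of the patches.

// persist the fitted model (the path is an arbitrary choice for this sketch)
~classifier.write("/tmp/knn-classifier.json", {\written.postln;});

// in a later session, create a fresh client and restore the saved state
~classifier2 = FluidKNNClassifier.new(s);
~classifier2.read("/tmp/knn-classifier.json", {\read.postln;});

// the restored classifier should answer queries like the original
~classifier2.predictPoint(~target, 2, action: {|x| x.postln;});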