Merge branch 'FluidRobustScale' into clients/inter_client_comms

nix
Owen Green 5 years ago
commit 362d227040

@ -898,6 +898,13 @@ namespace impl {
RegisterUnitIf<IsRTQueryModel,NRTModelQueryUnit>()(ft);
Client::getMessageDescriptors().template iterate<SetupMessageCmd>();
static std::string flushCmd = std::string(Wrapper::getName()) + "/flush";
ft->fDefinePlugInCmd(flushCmd.c_str(),[](World*, void*, struct sc_msg_iter*, void* ){
mCache.clear();
},nullptr);
}

@ -1,7 +1,7 @@
FluidAudioTransport : UGen {
init { |...theInputs|
theInputs.postln;
theInputs;
inputs = theInputs;
this.specialIndex = 1; //two audio inputs
// ^this.initOutputs(1,rate);

@ -0,0 +1,68 @@
// Client class for the FluidRobustScale server object: scales each
// dimension of a FluidDataSet by centring it on its median and scaling
// to the range between two centiles (default: 25th and 75th, i.e. the
// interquartile range).
// NOTE(review): prMakeMsg, prSendMsg, prEncodeBuffer, `id` and
// `actions` are inherited from FluidRealTimeModel / FluidServerObject
// (not visible in this view) — assumed to follow the usual FluCoMa
// client messaging conventions; confirm against the superclass.
FluidRobustScale : FluidRealTimeModel {
// Getter/setter accessors for the three scaling parameters.
var <>low, <>high, <>invert;
// low/high: centile boundaries; invert: 0 = scale raw data into the
// robust range, 1 = map scaled data back to the original range.
*new {|server, low = 25, high = 75, invert = 0|
^super.new(server,[low,high,invert])
.low_(low).high_(high).invert_(invert);
}
// Parameter list sent to the server. The trailing -1,-1 are
// placeholders (presumably the input/output buffer slots used by the
// kr query — TODO confirm against the server-side plugin).
prGetParams{
^[this.low,this.high,this.invert,-1,-1];
}
// Build the OSC message that fits the scaler to a dataset.
fitMsg{|dataSet|
^this.prMakeMsg(\fit,id,dataSet.id)
}
// Learn per-dimension median and centile range from dataSet;
// `action` runs when the server replies.
fit{|dataSet, action|
actions[\fit] = [nil,action];
this.prSendMsg(this.fitMsg(dataSet));
}
// Build the OSC message that scales sourceDataSet into destDataSet
// using the previously fitted values.
transformMsg{|sourceDataSet, destDataSet|
^this.prMakeMsg(\transform,id,sourceDataSet.id,destDataSet.id);
}
// Apply the learned scaling; `action` runs on completion.
transform{|sourceDataSet, destDataSet, action|
actions[\transform] = [nil,action];
this.prSendMsg(this.transformMsg(sourceDataSet, destDataSet));
}
// Build the OSC message that fits and transforms in one server call.
fitTransformMsg{|sourceDataSet, destDataSet|
^this.prMakeMsg(\fitTransform,id,sourceDataSet.id,destDataSet.id)
}
// Fit to sourceDataSet and write the scaled data to destDataSet in a
// single round trip; `action` runs on completion.
fitTransform{|sourceDataSet, destDataSet, action|
actions[\fitTransform] = [nil,action];
this.prSendMsg(this.fitTransformMsg(sourceDataSet, destDataSet));
}
// Build the OSC message that scales a single point held in
// sourceBuffer into destBuffer. The bundled "/b_query" presumably
// refreshes the client-side cache of destBuffer once the transform
// has written to it — TODO confirm.
transformPointMsg{|sourceBuffer, destBuffer|
^this.prMakeMsg(\transformPoint,id,
this.prEncodeBuffer(sourceBuffer),
this.prEncodeBuffer(destBuffer),
["/b_query",destBuffer.asUGenInput]
);
}
// Scale a single buffered point; `action` receives destBuffer when
// the server has finished writing to it.
transformPoint{|sourceBuffer, destBuffer, action|
actions[\transformPoint] = [nil,{action.value(destBuffer)}];
this.prSendMsg(this.transformPointMsg(sourceBuffer, destBuffer));
}
// Control-rate, server-side query: on each trigger, scale the point
// in inputBuffer into outputBuffer via the companion "/query" plugin
// UGen. Explicit low/high/invert arguments override (and update) the
// stored instance parameters; `?` keeps the stored value when the
// argument is nil.
kr{|trig, inputBuffer,outputBuffer,low,high,invert|
low = low ? this.low;
high = high ? this.high;
invert = invert ? this.invert;
this.low_(low).high_(high).invert_(invert);
^FluidProxyUgen.kr(this.class.name.asString++'/query', K2A.ar(trig),
id, this.low, this.high, this.invert, this.prEncodeBuffer(inputBuffer), this.prEncodeBuffer(outputBuffer));
}
}

@ -15,6 +15,7 @@ FluidServerObject
serverCaches[this] ?? {serverCaches[this] = FluidServerCache.new};
serverCaches[this].initCache(server);
NotificationCenter.register(server,\newAllocators,this,{ count = 0; });
ServerBoot.add({this.flush(Server.internal)},Server.internal);
}
*newMsg{|id, params|
@ -81,6 +82,9 @@ FluidServerObject
*objectClassName { ^this.name.asSymbol }
*flushMsg { ^['/cmd',this.objectClassName ++ '/flush'] }
*flush {|server| server.listSendMsg(this.flushMsg)}
}
FluidBufProcessor : FluidServerObject
@ -229,7 +233,7 @@ FluidDataObject : FluidServerObject
}
*cachedInstanceAt{|server,id|
this.initCache;
this.initCache(server);
^serverCaches[this].at(server,id);
}

@ -1,15 +1,14 @@
FluidUMAP : FluidModelObject {
FluidUMAP : FluidRealTimeModel {
var <>numDimensions, <>numNeighbours, <>minDist, <>iterations, <>learnRate, <>batchSize;
var <>numDimensions, <>numNeighbours, <>minDist, <>iterations, <>learnRate;
*new {|server,numDimensions = 2, numNeighbours = 15, minDist = 0.1, iterations = 200, learnRate = 0.1, batchSize = 50|
^super.new(server,[numDimensions, numNeighbours, minDist, iterations, learnRate, batchSize])
*new {|server,numDimensions = 2, numNeighbours = 15, minDist = 0.1, iterations = 200, learnRate = 0.1|
^super.new(server,[numDimensions, numNeighbours, minDist, iterations, learnRate])
.numDimensions_(numDimensions)
.numNeighbours_(numNeighbours)
.minDist_(minDist)
.iterations_(iterations)
.learnRate_(learnRate)
.batchSize_(batchSize);
.learnRate_(learnRate);
}
prGetParams{
@ -19,7 +18,7 @@ FluidUMAP : FluidModelObject {
this.minDist,
this.iterations,
this.learnRate,
this.batchSize
-1,-1
]
}
@ -32,16 +31,55 @@ FluidUMAP : FluidModelObject {
this.prSendMsg(this.fitTransformMsg(sourceDataSet,destDataSet));
}
transformPoint{|sourceBuffer, destBuffer, action|
sourceBuffer = this.prEncodeBuffer(sourceBuffer);
destBuffer = this.prEncodeBuffer(destBuffer);
this.prSendMsg(\transformPoint,[sourceBuffer, destBuffer], action, outputBuffers:[destBuffer]);
}
fitMsg{|dataSet|
^this.prMakeMsg(\fit,id, dataSet.id);
}
fit{|dataSet, action|
actions[\fit] = [nil, action];
this.prSendMsg(this.fitMsg(dataSet));
}
transformMsg{|sourceDataSet, destDataSet|
^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
}
transform{|sourceDataSet, destDataSet, action|
actions[\transform] = [nil, action];
this.prSendMsg(this.transformMsg(sourceDataSet,destDataSet));
}
transformPointMsg{|sourceBuffer, destBuffer|
^this.prMakeMsg(\transformPoint,id,
this.prEncodeBuffer(sourceBuffer),
this.prEncodeBuffer(destBuffer),
["/b_query",destBuffer.asUGenInput]
);
}
transformPoint{|sourceBuffer, destBuffer, action|
actions[\transformPoint] = [nil,{action.value(destBuffer)}];
this.prSendMsg(this.transformPointMsg(sourceBuffer,destBuffer));
}
kr{|trig, inputBuffer,outputBuffer,numDimensions|
numDimensions = numDimensions ? this.numDimensions;
this.numDimensions_(numDimensions);
^FluidProxyUgen.kr(this.class.name.asString++'/query', K2A.ar(trig),
id,
this.numDimensions,
this.numNeighbours,
this.minDist,
this.iterations,
this.learnRate,
this.prEncodeBuffer(inputBuffer),
this.prEncodeBuffer(outputBuffer));
}
// not implemented
cols {|action|}
read{|filename,action|}
write{|filename,action|}
size { |action|}
}

@ -274,7 +274,7 @@ fork{
~inBuf = Buffer.readChannel(s, Platform.resourceDir +/+ "sounds/a11wlk01.wav", numFrames:15000, channels:[0]);
~inBuf.play
//OR one from within
//OR one from within (but just the beginning, so beware of the difference!)
~inBuf = Buffer.alloc(s,15000);
~randomSlice = ~slices["data"].keys.asArray.scramble[0];
~audio_buffers[~slices["data"][~randomSlice][0]].copyData(~inBuf,srcStartAt: ~slices["data"][~randomSlice][1], numSamples: 15000.min(~slices["data"][~randomSlice][2] - (~slices["data"][~randomSlice][1])));
@ -323,8 +323,3 @@ fork{
};
}
)

@ -43,7 +43,7 @@ Routine{
}.play
)
~labels = FluidLabelSet(s,\simple1label);
~labels = FluidLabelSet(s);
~kmeans.predict(~ds,~labels, {|x| ("Size of each cluster" + x).postln})

@ -47,7 +47,7 @@ Routine{
}.play
)
~labels = FluidLabelSet(s,\simple1label);
~labels = FluidLabelSet(s);
~kmeans.predict(~ds,~labels, {|x| ("Size of each cluster" + x).postln})

@ -8,7 +8,7 @@
~dict.add(\cols -> 10)
//create a dataset, then loading the dictionary
~ds = FluidDataSet.new(s,\simple1data);
~ds = FluidDataSet(s);
~ds.load(~dict)
~ds.print
@ -33,7 +33,7 @@
~dict2.add(\cols -> 1)
// creating a labelset and loading the dictionary
~ls = FluidLabelSet.new(s,\simplelabel);
~ls = FluidLabelSet(s);
~ls.load(~dict2)
~ls.print

@ -34,10 +34,10 @@ FluidBufMelBands.process(s,~audio, features: ~melfeatures,action: {\done.postln;
dsWr = FluidDataSetWr.kr(~raw, buf: buf, trig: Done.kr(stats));
LocalOut.kr( Done.kr(dsWr));
FreeSelf.kr(count - 99);
Poll.kr(trig,count);
Poll.kr(trig,(100-count));
}.play;
)
// wait for the post window to acknoledge the job is done. Check the dataset if curious (loads of small numbers)
// wait for the count to reach 0 in the post window. Check the dataset if curious (loads of small numbers)
~raw.print;
// normalize the input

@ -70,7 +70,7 @@ code::
// another solution: minslicelength
(
{var env, source = SinOsc.ar(320,0,LFSaw.ar(20, 0, -0.4, 0.6));
env = FluidAmpSlice.ar(source,fastRampUp: 5,fastRampDown: 50,slowRampUp: 220,slowRampDown: 220, onThreshold: 10, offThreshold: 7,floor: -60, minSliceLength: 220);
env = FluidAmpSlice.ar(source,fastRampUp: 5,fastRampDown: 50,slowRampUp: 220,slowRampDown: 220, onThreshold: 10, offThreshold: 7,floor: -60, minSliceLength: 441);
[source, env]
}.plot(0.08);
)

@ -182,7 +182,6 @@ b.play
// create a new buffer as destinations
c = Buffer.new(s);
OSCFunc.trace(false)
//run the process on them
(
// with basic params

@ -145,16 +145,16 @@ b.play
c = Buffer.new(s);
//run the process on them
(
// with basic params
Routine{
var t = Main.elapsedTime;
var proc = FluidBufAmpSlice.process(s,b, indices: c, fastRampUp: 10,fastRampDown: 2205,slowRampUp: 4410,slowRampDown: 4410, onThreshold: 10,offThreshold: 5);
proc.wait;
c.query;
(Main.elapsedTime - t).postln;
}.play
)
(
// with basic params
Routine{
var t = Main.elapsedTime;
var proc = FluidBufAmpSlice.process(s,b, indices: c, fastRampUp: 10,fastRampDown: 2205,slowRampUp: 4410,slowRampDown: 4410, onThreshold: 10,offThreshold: 5);
proc.wait;
c.query;
(Main.elapsedTime - t).postln;
}.play
)
// list the indices of detected attacks - the two input channels have been summed.
c.getn(0,c.numFrames,{|item|(item * 2).postln;})

@ -92,7 +92,6 @@ FluidBufAudioTransport.process(s,b,source2:c,destination:d,interpolation:0.5,act
// listen to the source and the result
b.play
c.play
d.updateInfo
d.play
// more interesting sources: two cardboard bowing gestures

@ -72,8 +72,9 @@ d = Buffer.new(s);
// with basic params (basic summing of each full buffer in all dimensions)
(
Routine{
FluidBufCompose.process(s, source: b, destination: d).wait;
FluidBufCompose.process(s, source: c, destination: d, destGain: 1.0).wait;
FluidBufCompose.process(s, source: b, destination: d);
FluidBufCompose.process(s, source: c, destination: d, destGain: 1.0);
s.sync;
d.query;
d.play;
}.play;

@ -83,7 +83,7 @@ b = Buffer.read(s,~randomsoundfile.path,action:{"Sound Loaded".postln});
~flatdata = Buffer.new;
)
//Pitch analysis, writes pitches as frequecnies to chan 0, confidences [0-1] to chan 1
//Pitch analysis, writes pitches as frequencies to chan 0, confidences [0-1] to chan 1
FluidBufPitch.process(s,b,numFrames:512 * 10,numChans:1,features:~pitchdata,action:{"Pitch Analysis Done".postln});
// Flatten and print the flat buffer. We expect to see larger numbers (20-2000) interleaved with smaller (0-1)
@ -94,4 +94,13 @@ FluidBufFlatten.process(s,~pitchdata,~flatdata,axis:1,action:{
})
})
)
//changing the axis, we see all large numbers first
(
FluidBufFlatten.process(s,~pitchdata,~flatdata,axis:0,action:{
~flatdata.loadToFloatArray(action:{ |a|
a.postln;
})
})
)
::

@ -125,7 +125,7 @@ code::
d = Buffer.new(s);
e = Buffer.new(s);
)
OSCFunc.trace(true, true)
// run with basic parameters
(
Routine{

@ -175,7 +175,7 @@ y = Buffer.new(s);
~fft_size = 1024;
~frame_size = 512;
~hop_size = 256;
~which_component = 3;
~which_component = 1;
)
// matrix factorisation, requesting everything - wait for the computation time to appear.

@ -72,5 +72,6 @@ b = Buffer.read(s,~path+/+"Nicol-LoopE-M.wav")
t = Buffer.read(s,~path+/+"Tremblay-SA-UprightPianoPedalWide.wav")
o = Buffer.new
FluidBufNMFCross.process(s,t,b,o,action:{"Ding".postln})
//wait for it to be done. It is a long process.
o.play
::

@ -70,7 +70,7 @@ Routine{
// play with the scaling
FluidBufScale.process(s, b, destination: c, inputLow: 0, inputHigh: 1, outputLow: 20, outputHigh:10).wait;
// retrieve the buffer and enjoy the results.
c.getn(0,10,{|x|x.postln;})
c.getn(0,10,{|x|x.round(0.000001).postln;})
}.play
)

@ -186,7 +186,6 @@ c = Buffer.new(s);
// run the stats and send back the values
FluidBufStats.process(s, b, stats:c, numDerivs:1, action:{c.getn(0,c.numFrames * c.numChannels,{|item|d = item; d.postln})});
OSCFunc.allEnabled
//looking at the result is not easy to grasp, since it is interleaved: first number is mean of L, second is mean of R, third is stddev of L, fourth is stddev of R
//this will make it tidier - the first value of each line is Left, the second is Right

@ -59,7 +59,7 @@ c = Buffer(s)
// play with the threshold
FluidBufThresh.process(s, b, destination: c, threshold: 0.5)
// retrieve the buffer and enjoy the results.
c.getn(0,11,{|x|x.postln;})
c.getn(0,11,{|x|x.round(0.000001).postln;})
// also works in multichannel - explore the following buffer
b = Buffer.sendCollection(s,0.0.series(0.1,2.0).scramble,2)
@ -77,5 +77,5 @@ FluidBufThresh.process(s, b,startFrame: 3,numFrames: 4,startChan: 1,numChans: 1,
//enjoy
c.plot(separately: true).plotMode_(\points)
c.query
c.getn(0,4,{|x|x.postln;})
c.getn(0,4,{|x|x.round(0.000001).postln;})
::

@ -1,7 +1,7 @@
TITLE:: FluidDataSet
summary:: Container for labelled, multidimensional data
categories:: UGens>FluidManipulation
related:: Classes/FluidLabelSet, Classes/FluidKDTree, Classes/FluidKNN, Classes/FluidKMeans
related:: Classes/FluidLabelSet, Classes/FluidKDTree, Classes/FluidKMeans
DESCRIPTION::
A server-side container associating labels with multi-dimensional data. FluidDataSet is identified by its name.
@ -15,20 +15,10 @@ Create a new instance of the DataSet, with the given name. If a DataSet with thi
ARGUMENT:: server
The link::Classes/Server:: on which to create the data set.
ARGUMENT:: name
A symbol with the name of the DataSet.
returns:: The new instance
METHOD:: at
Retrieves a cached instance of a FluidDataSet with the given name, or returns nil if no such object exists.
ARGUMENT:: server
The server associated with this DataSet instance.
ARGUMENT:: name
The name of the DataSet to retrieve from the cache.
INSTANCEMETHODS::
PRIVATE:: init,id,cache
@ -63,9 +53,6 @@ Destroy the object on the server.
METHOD:: print
Post an abbreviated content of the DataSet in the window by default, but you can supply a custom action instead.
METHOD:: synth
The internal synth the object uses to communicate with the server
returns:: A link::Classes/Synth::
@ -102,7 +89,7 @@ d = Dictionary.new;
d.add(\cols -> 1);
d.add(\data -> Dictionary.newFrom(10.collect{|i|[i.asString, [i.asFloat]]}.flatten));
fork{
~ds = FluidDataSet.new(s); s.sync;
~ds = FluidDataSet.new(s);
~ds.load(d); s.sync;
~ds.dump; s.sync; ~ds.free;
}
@ -123,7 +110,6 @@ fork{
::
STRONG:: Merging Datasets::
s.dumpOSC
code::
//this is how to add items between 2 datasets.
//create 2 datasets
@ -131,7 +117,7 @@ code::
~dsA = FluidDataSet.new(s);
~dsB = FluidDataSet.new(s);
)
Dictionary.new
//feed them items with same dimensions but different labels
~dsA.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom([\one,1,\two,2])]));
~dsB.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom([\three,3,\four,4])]));

@ -120,9 +120,8 @@ fork{
s.sync;
~points.do{|x,i|
~tmpbuf.setn(0,x);
s.sync;
~dataSet.addPoint(i,~tmpbuf);
// s.sync
s.sync;
}
}
)
@ -137,7 +136,6 @@ fork{
// prepare a simple query
~query.filter(0,"<",0.04);
~query.addColumn(2);
s.dumpOSC
~query.transform(~dataSet, ~out);
// check the result
@ -162,9 +160,9 @@ code::
//this is how to join 2 datasets, adding columns to items with the same label
//create 3 datasets
(
~dsA = FluidDataSet.new(s,\joinA);
~dsB = FluidDataSet.new(s,\joinB);
~dsC = FluidDataSet.new(s,\joinC);
~dsA = FluidDataSet(s);
~dsB = FluidDataSet(s);
~dsC = FluidDataSet(s);
)
//feed them items with almost overlapping label lists but with different dimensions

@ -45,7 +45,7 @@ EXAMPLES::
code::
s.reboot;
(
~ds = FluidDataSet(s,\FluidDataSetWr);
~ds = FluidDataSet(s);
)
(

@ -52,12 +52,11 @@ code::
s.reboot;
(
fork{
~ds = FluidDataSet.new(s,\kdtree_help_rand2d);
d = Dictionary.with(
*[\cols -> 2,\data -> Dictionary.newFrom(
100.collect{|i| [i, [ 1.0.linrand,1.0.linrand]]}.flatten)]);
s.sync;
~ds.load(d, {~ds.print});
d = Dictionary.with(
*[\cols -> 2,\data -> Dictionary.newFrom(
100.collect{|i| [i, [ 1.0.linrand,1.0.linrand]]}.flatten)]);
~ds = FluidDataSet(s);
~ds.load(d, {~ds.print});
}
)
@ -98,7 +97,7 @@ fork{
// Limit the search to an acceptable distance in a radius
// Define a point, and observe typical distance values
~p = [ 0.2,0.2];
~p = [ 0.4,0.4];
(
~tmpbuf = Buffer.loadCollection(s, ~p, 1, {
~tree.kNearestDist(~tmpbuf,{ |a|a.postln;~nearest = a;});
@ -106,7 +105,7 @@ fork{
)
// enter a valid radius.
~tree.radius = 0.05;
~tree.radius = 0.1;
// FluidKDTree will return only values that are within that radius, up to numNeighbours values
(
~tmpbuf = Buffer.loadCollection(s, ~p, 1, {
@ -129,7 +128,7 @@ For instance, whilst fitting the tree against some n-dimensional descriptor data
code::
,(
(
Routine{
var inputBuffer = Buffer.alloc(s,2);
var outputBuffer = Buffer.alloc(s,10);//5 neighbours * 2D data points
@ -139,7 +138,7 @@ Routine{
var point = 2.collect{TRand.kr(0,1,trig)};
point.collect{|p,i| BufWr.kr([p],inputBuffer,i)};
~tree.kr(trig,inputBuffer,outputBuffer,5,nil);
Poll.kr(trig, BufRd.kr(1,outputBuffer,Array.iota(5)),5.collect{|i| "Neighbour" + i});
Poll.kr(trig, BufRd.kr(1,outputBuffer,Array.iota(10)),10.collect{|i| "Neighbour" + (i/2).asInteger ++ "-" ++ (i.mod(2))});
Silent.ar;
}.play;
}.play;
@ -148,12 +147,11 @@ Routine{
//Using a lookup data set instead:
//here we populate with numbers that are in effect the indices, but it could be anything numerical that will be returned on the server side and would be usable there
(
~dsL = FluidDataSet.new(s);
fork{
d = Dictionary.with(
*[\cols -> 1,\data -> Dictionary.newFrom(
100.collect{|i| [i, [ i ]]}.flatten)]);
s.sync;
d = Dictionary.with(
*[\cols -> 1,\data -> Dictionary.newFrom(
100.collect{|i| [i, [ i ]]}.flatten)]);
~dsL = FluidDataSet.new(s);
~dsL.load(d, {~dsL.print});
}
)
@ -169,10 +167,9 @@ Routine{
var point = 2.collect{TRand.kr(0,1,trig)};
point.collect{|p,i| BufWr.kr([p],inputBuffer,i)};
~tree.kr(trig,inputBuffer,outputBuffer,5,~dsL);
Poll.kr(trig, BufRd.kr(1,outputBuffer,Array.iota(5)),5.collect{|i| "Neighbour" + i});
Poll.kr(trig, BufRd.kr(1,outputBuffer,Array.iota(5)),5.collect{|i| "Neighbour" + i});
Silent.ar;
}.play;
}.play;
)
::

@ -67,7 +67,6 @@ A function to run when complete, taking an array of the counts for each category
EXAMPLES::
Server.default.options.outDevice = "Built-in Output"
code::
(
@ -76,7 +75,7 @@ code::
64.collect{(1.sum3rand) + [1,-1].choose}.clump(2)
}).flatten(1) * 0.5;
fork{
~dataSet = FluidDataSet.new(s,\kmeans_help_rand2d);
~dataSet = FluidDataSet(s);
d = Dictionary.with(
*[\cols -> 2,\data -> Dictionary.newFrom(
~points.collect{|x, i| [i, x]}.flatten)]);
@ -106,10 +105,6 @@ fork{
~kmeans.size;
~kmeans.dump;
~clusters.getLabel(0,{|clusterID|
(0.asString+clusterID).postln;
});
// Retrieve labels of clustered points
(
~assignments = Array.new(128);
@ -157,8 +152,6 @@ subsection:: Queries in a Synth
This is the equivalent of predictPoint, but wholly on the server
code::
outputPoint.getToFloatArray(action:{|a|a.postln})
(
{
var trig = Impulse.kr(5);

@ -62,7 +62,7 @@ code::
~test = FluidDataSet(s);
~mapping = FluidLabelSet(s);
)
s.dumpOSC
//Make some clumped 2D points and place into a DataSet
(
~examplepoints = [[0.5,0.5],[-0.5,0.5],[0.5,-0.5],[-0.5,-0.5]];
@ -93,7 +93,7 @@ d = Dictionary.with(
~classifier.predict(~test, ~mapping, 1);
)
//Return labels of clustered points
//Return labels of clustered points - wait for the dump to be done
(
~assignments = Array.new(~testpoints.size);
fork{
@ -175,7 +175,7 @@ code::
var outputPoint = LocalBuf(1);
point.collect{|p,i| BufWr.kr([p],inputPoint,i)};
~classifier.kr(trig,inputPoint,outputPoint);
SinOsc.ar((BufRd.kr(1,outputPoint,0,interpolation:0) + 69).midicps,mul: 0.1);
SinOsc.ar((BufRd.kr(1,outputPoint,0,interpolation:0) + 69).midicps, mul: 0.1);
}.play
)
::

@ -106,7 +106,7 @@ d = Dictionary.with(
//We should see a single cycle of a chirp
~outputdata.plot;
s.dumpOSC
// single point transform on arbitrary value
~inbuf = Buffer.loadCollection(s,[0.5]);
~regressor.predictPoint(~inbuf,{|x|x.postln;});
@ -115,7 +115,7 @@ s.dumpOSC
subsection:: Server Side Queries
code::
//Setup
//we are here querying with a saw in control rate, all on the server, via a buffer interface
(
{
var input = Saw.kr(2).linlin(-1,1,0,1);
@ -124,7 +124,7 @@ code::
var outputPoint = LocalBuf(1);
BufWr.kr(input,inputPoint,0);
~regressor.kr(trig,inputPoint,outputPoint);
BufRd.kr(1,outputPoint,0);//,"mapped value")
BufRd.kr(1,outputPoint,0);
}.scope
)

@ -13,15 +13,6 @@ METHOD:: new
Make a new instance of a label set, uniquely identified by its name. Creating an instance with a name already in use will throw an exception. Use link::Classes/FluidLabelSet#*at:: or free the existing instance.
ARGUMENT:: server
The link::Classes/Server:: on which to create the label set.
ARGUMENT:: name
symbol with the label set's name.
METHOD:: at
Retrieve a label set from the cache.
ARGUMENT:: server
The link::Classes/Server:: on which to create the label set.
ARGUMENT:: name
symbol or string with the label set's name.
INSTANCEMETHODS::
@ -30,7 +21,7 @@ PRIVATE:: init, id
METHOD:: addLabel
Add a label to the label set.
ARGUMENT:: id
ARGUMENT:: identifier
symbol or string with the ID for this label.
ARGUMENT:: label
symbol or string with the label to add.

@ -1,7 +1,7 @@
TITLE:: FluidMDS
summary:: Dimensionality Reduction with Multidimensional Scaling
categories:: Dimensionality Reduction, Data Processing
related:: Classes/FluidMDS, Classes/FluidDataSet
related:: Classes/FluidPCA, Classes/FluidDataSet
DESCRIPTION::
@ -99,10 +99,10 @@ FluidBufMFCC.process(s,~audio, features: ~mfcc_feature);
dsWr = FluidDataSetWr.kr(~raw, buf: buf, trig: Done.kr(stats),blocking:1);
LocalOut.kr(Done.kr(dsWr));
FreeSelf.kr(count - 99);
Poll.kr(trig,count);
Poll.kr(trig,(100-count));
}.play;
)
// wait for the post window to acknoledge the job is done.
// wait for the count to reach 0 in the post window.
//First standardize our DataSet, so that the MFCC dimensions are on commensurate scales
//Then apply the MDS in-place on the standardized data to get 2 dimensions, using a Euclidean distance metric

@ -89,7 +89,7 @@ x = {arg type = 0;
// change the wave types, observe the amplitude invariance of the descriptors, apart from the leftmost coefficient
x.set(\type, 1)
~winRange = 50; //adjust the range above and below 0 to zoom in or out on the MFCC
~winRange = 5; //adjust the range above and below 0 to zoom in or out on the MFCC
x.set(\type, 2)
x.set(\type, 0)
// free this source

@ -124,10 +124,9 @@ code::
//Run the test data through the network, into the predicted labelset
~classifier.predict(~testdata,~predictedlabels,action:{"Test complete".postln});
OSCFunc.trace(true,true)
OSCFunc.allEnabled
//get labels from server
~predictedlabels.dump(action:{|d|~labelsdict = d["data"]};~labelsdict.postln);
~predictedlabels.dump(action:{|d| ~labelsdict = d["data"]; ~labelsdict.postln});
//Visualise: we're hoping to see colours neatly mapped to quadrants...
(

@ -112,7 +112,7 @@ c = Buffer.new(s);
// train only 2 seconds
(
Routine {
FluidBufNMF.process(s,b,0,88200,0,1, c, ~bases, components:10,fftSize:2048);
FluidBufNMF.process(s,b,0,88200,0,1, c, ~bases, components:10,fftSize:2048).wait;
c.query;
}.play;
)
@ -197,7 +197,7 @@ c = Buffer.new(s);
// train only 2 seconds
(
Routine {
FluidBufNMF.process(s,b,0,88200,0,1, c, ~bases, components:8, hopSize:256, fftSize:2048);
FluidBufNMF.process(s,b,0,88200,0,1, c, ~bases, components:8, hopSize:256, fftSize:2048).wait;
c.query;
}.play;
)

@ -1,7 +1,7 @@
TITLE:: FluidNormalize
summary:: Normalize a FluidDataSet
categories:: FluidManipulation
related:: Classes/FluidStandardize, Classes/FluidDataSet
related:: Classes/FluidStandardize, Classes/FluidRobustScale, Classes/FluidDataSet
DESCRIPTION::
Normalize the entries of a link::Classes/FluidDataSet::, or normalize a data point according to the learned bounds of a data set. On the server.
@ -69,8 +69,8 @@ s.boot;
// FluidNormalize.dumpAllMethods
(
~audiofile = File.realpath(FluidBufPitch.class.filenameSymbol).dirname +/+ "../AudioFiles/Tremblay-ASWINE-ScratchySynth-M.wav";
~raw = FluidDataSet(s,\norm_help_raw);
~norm = FluidDataSet(s,\norm_help_normd);
~raw = FluidDataSet(s);
~norm = FluidDataSet(s);
~pitch_feature = Buffer.new(s);
~stats = Buffer.alloc(s, 7, 2);
~normalizer = FluidNormalize(s);
@ -122,15 +122,14 @@ FluidBufPitch.process(s,~audio, features: ~pitch_feature);
//which can be unhelpful in many cases
(
~rawarray.flatten(1).unlace.plot("Unnormalized",Rect(0,0,400,400),minval:0,maxval:[5000,1]).plotMode=\bars;
~plot2 = ~normedarray.flatten(1).unlace.plot("Normalized",Rect(410,0,400,400)).plotMode=\bars;
(~rawarray ++ 0).flop.plot("Unnormalized",Rect(0,0,400,400),minval:0,maxval:[5000,1]).plotMode=\bars;
(~normedarray ++ 0).flop.plot("Normalized",Rect(410,0,400,400)).plotMode=\bars;
)
// single point transform on arbitrary value
~inbuf = Buffer.loadCollection(s,0.5.dup);
~outbuf = Buffer.new(s);
~normalizer.transformPoint(~inbuf,~outbuf,{|x|x.postln;x.getn(0,2,{|y|y.postln;};)});
OSCFunc.trace(false,true)
//Server side queries
(

@ -36,7 +36,7 @@ Source data, or the DataSet name
ARGUMENT:: destDataSet
Destination data, or the DataSet name
ARGUMENT:: action
Run when done. The variance is passed as an argument, aka the fidelity of the new representation: a value near 1.0 means a higher fidelity to the original.
Run when done. The fraction of accounted variance is passed as an argument, aka the fidelity of the new representation: a value near 1.0 means a higher fidelity to the original.
METHOD:: fitTransform
link::Classes/FluidPCA#fit:: and link::Classes/FluidPCA#transform:: in a single pass
@ -45,7 +45,7 @@ Source data, or the DataSet name
ARGUMENT:: destDataSet
Destination data, or the DataSet name
ARGUMENT:: action
Run when done. The variance is passed as an argument, aka the fidelity of the new representation: a value near 1.0 means a higher fidelity to the original.
Run when done. The fraction of accounted variance is passed as an argument, aka the fidelity of the new representation: a value near 1.0 means a higher fidelity to the original.
METHOD:: transformPoint
Given a trained model, transform the data point in a link::Classes/Buffer:: and write to an output
@ -102,10 +102,10 @@ FluidBufMFCC.process(s,~audio, features: ~mfcc_feature,action:{"Done MFCCs".post
dsWr = FluidDataSetWr.kr(~raw, buf: buf, trig: Done.kr(stats));
LocalOut.kr( Done.kr(dsWr));
FreeSelf.kr(count - 99);
Poll.kr(trig,count);
Poll.kr(trig,(100 - count));
}.play;
)
// wait for the post window to acknoledge the job is done.
// wait for the count to reach 0 in the post window.
//First standardize our DataSet, so that the MFCC dimensions are on commensurate scales
//Then apply the PCA in-place on the standardized data

@ -93,7 +93,7 @@ s.reboot;
~slicer = FluidSliceCorpus({ |src,start,num,dest|
FluidBufOnsetSlice.kr(src,start,num,indices:dest, threshold:2)
});
~pitchdata = FluidDataSet(s,\FluidProcessSlicesHelp);
~pitchdata = FluidDataSet(s);
~pitchbufs = 4.collect{Buffer.new};
~statsbufs = 4.collect{Buffer.new};
)

@ -0,0 +1,158 @@
TITLE:: FluidRobustScale
summary:: Apply Robust Scaling to FluidDataSet
categories:: FluidManipulation
related:: Classes/FluidStandardize, Classes/FluidNormalize, Classes/FluidDataSet
DESCRIPTION::
Scales the entries of a link::Classes/FluidDataSet::, or scales a data point according to the learned values of a data set. It will centre the median of each dimension on 0, and will scale the data to +/- the provided centiles, by default the first and third quartile (25 and 75). All of this happens on the server.
See https://scikit-learn.org/stable/auto_examples/preprocessing/plot_all_scaling.html#robustscaler
CLASSMETHODS::
private:: kr
METHOD:: new
Create a new instance
ARGUMENT:: server
The link::Classes/Server:: on which to run
ARGUMENT:: low
The low centile boundary, default 25.
ARGUMENT:: high
The high centile boundary, default 75.
ARGUMENT:: invert
The direction in which the scaling will occur for transform and transformPoint. The default (0) takes input in the range of the data used to fit and transforms it towards the robust scaling range. A value of 1 expects input in the scaling range and transforms it back towards the original range.
INSTANCEMETHODS::
METHOD:: fit
Compute the scaling factors from a link::Classes/FluidDataSet:: for later.
ARGUMENT:: dataSet
The link::Classes/FluidDataSet:: to scale
ARGUMENT:: action
A function to run when processing is complete
METHOD:: transform
Scale a link::Classes/FluidDataSet:: into another link::Classes/FluidDataSet::, using the learned range from a previous call to link::Classes/FluidRobustScale#fit::
ARGUMENT:: sourceDataSet
The link::Classes/FluidDataSet:: to scale
ARGUMENT:: destDataSet
The link::Classes/FluidDataSet:: to populate with scaled data
ARGUMENT:: action
A function to run when processing is complete
METHOD:: fitTransform
Scale a link::Classes/FluidDataSet::
ARGUMENT:: sourceDataSet
The link::Classes/FluidDataSet:: to scale
ARGUMENT:: destDataSet
The link::Classes/FluidDataSet:: to populate with scaled data
ARGUMENT:: action
A function to run when processing is complete
METHOD:: transformPoint
Scale a new data point, using the learned scaling from a previous call to link::Classes/FluidRobustScale#fit::
ARGUMENT:: sourceBuffer
A link::Classes/Buffer:: with the new data point
ARGUMENT:: destBuffer
A link::Classes/Buffer:: to contain the scaled value
ARGUMENT:: action
A function to run when processing is complete
EXAMPLES::
code::
s.boot;
//Preliminaries: we want some audio, a couple of FluidDataSets, some Buffers and a FluidRobustScale
(
~audiofile = File.realpath(FluidBufPitch.class.filenameSymbol).dirname +/+ "../AudioFiles/Tremblay-ASWINE-ScratchySynth-M.wav";
~raw = FluidDataSet(s);
~scaled = FluidDataSet(s);
~pitch_feature = Buffer.new(s);
~stats = Buffer.alloc(s, 7, 2);
~robust = FluidRobustScale(s);
)
// Load audio and run a pitch analysis, which gives us pitch and pitch confidence (so a 2D datum)
(
~audio = Buffer.read(s,~audiofile);
FluidBufPitch.process(s,~audio, features: ~pitch_feature);
)
// Divide the time series in to 10, and take the mean of each segment and add this as a point to
// the 'raw' FluidDataSet
(
{
var trig = LocalIn.kr(1, 1);
var buf = LocalBuf(2, 1);
var count = PulseCount.kr(trig) - 1;
var chunkLen = (~pitch_feature.numFrames / 10).asInteger;
var stats = FluidBufStats.kr(
source: ~pitch_feature, startFrame: count * chunkLen,
numFrames: chunkLen, stats: ~stats, trig: (trig * (count <=9)), blocking:1
);
var rd = BufRd.kr(2, ~stats, DC.kr(0), 0, 1);// pick only mean pitch and confidence
var wr1 = BufWr.kr(rd[0], buf, DC.kr(0));
var wr2 = BufWr.kr(rd[1], buf, DC.kr(1));
var dsWr = FluidDataSetWr.kr(~raw, buf: buf, trig: Done.kr(stats));
LocalOut.kr( Done.kr(dsWr));
Poll.kr(trig,count,\count);
FreeSelf.kr(count - 9);
}.play;
)
//check the dataset
~raw.print;
// Scale and load to language-side array
(
~rawarray = Array.new(10);
~scaledarray= Array.new(10);
~robust.fitTransform(~raw,~scaled, {
~raw.dump{|x| 10.do{|i|
~rawarray.add(x["data"][i.asString])
}};
~scaled.dump{|x| 10.do{|i|
~scaledarray.add(x["data"][i.asString])
}};
});
)
//Plot side by side. Before normalization the two dimensions have radically different scales
//which can be unhelpful in many cases
(
(~rawarray ++ 0).flop.plot("Raw Data",Rect(0,0,400,400),minval:0,maxval:[5000,1]).plotMode=\bars;
(~scaledarray ++ 0).flop.plot("Scaled",Rect(410,0,400,400), minval:-2,maxval:2).plotMode=\bars;
)
// single point transform on arbitrary value
~inbuf = Buffer.loadCollection(s,0.5.dup);
~outbuf = Buffer.new(s);
~robust.transformPoint(~inbuf,~outbuf,{|x|x.postln;x.getn(0,2,{|y|y.postln;};)});
//Server side queries
(
{
var audio = BufRd.ar(1,~audio,LFSaw.ar(BufDur.ir(~audio).reciprocal).range(0, BufFrames.ir(~audio)));
var counter = Stepper.ar(Impulse.ar(ControlRate.ir),max:99);
var trig = A2K.kr(HPZ1.ar(counter) < 0);
//average 100 frames: one could use the MovingAverage extension here
var avg;
var inputPoint = LocalBuf(2);
var outputPoint = LocalBuf(2);
var avgBuf = LocalBuf(100,2);
//running average of pitch features
BufWr.kr(FluidPitch.kr(audio),avgBuf,phase:counter);
avg = Mix.new(BufRd.kr(2, avgBuf, phase:100.collect{|x|x})) * 0.01;
//assemble data point
BufWr.kr(avg[0],inputPoint,0);
BufWr.kr(avg[1],inputPoint,1);
~robust.kr(trig,inputPoint,outputPoint);
Poll.kr(trig,BufRd.kr(1,inputPoint,[0,1]),["pitch (raw)", "confidence (raw)"]);
Poll.kr(trig,BufRd.kr(1,outputPoint,[0,1]),["pitch (scaled)", "confidence (scaled)"])
}.play;
)
::

@ -1,7 +1,7 @@
TITLE:: FluidStandardize
summary:: Standardize a FluidDataSet
categories:: FluidManipulation
related:: Classes/FluidDataSet, Classes/FluidStandardize
related:: Classes/FluidDataSet, Classes/FluidNormalize, Classes/FluidRobustScale
DESCRIPTION::
Standardize a link::Classes/FluidDataSet::, i.e. rescale using its mean(s) and standard deviation(s) in each dimension.
@ -112,8 +112,8 @@ FluidBufPitch.process(s,~audio, features: ~pitch_feature,action:{"Analysed Pitch
)
(
~rawarray.flatten(1).unlace.plot("Unstandardized",Rect(0,0,400,400),minval:0,maxval:[5000,1]).plotMode=\bars;
~plot2 = ~stdarray.flatten(1).unlace.plot("Standardized",Rect(410,0,400,400)).plotMode=\bars;
(~rawarray ++ 0).flop.plot("Unstandardized",Rect(0,0,400,400),minval:0,maxval:[5000,1]).plotMode=\bars;
(~stdarray ++ 0).flop.plot("Standardized",Rect(410,0,400,400), minval:-2,maxval:2).plotMode=\bars;
)
// single point transform on arbitrary value

@ -1,7 +1,7 @@
TITLE:: FluidUMAP
summary:: Dimensionality Reduction with Uniform Manifold Approximation and Projection
categories:: Dimensionality Reduction, Data Processing
related:: Classes/FluidMDS, Classes/FluidDataSet
related:: Classes/FluidMDS, Classes/FluidPCA, Classes/FluidDataSet
DESCRIPTION::
@ -25,13 +25,27 @@ ARGUMENT:: iterations
The number of iterations that the algorithm will go through to optimise the new representation
ARGUMENT:: learnRate
The learning rate of the algorithm, aka how much of the error it uses to estimate the next iteration.
ARGUMENT:: batchSize
The training batch size.
INSTANCEMETHODS::
PRIVATE:: init
METHOD:: fit
Train this model on a link::Classes/FluidDataSet:: but don't transform the data
ARGUMENT:: dataSet
A link::Classes/FluidDataSet:: to analyse
ARGUMENT:: action
Run when done
METHOD:: transform
Given a trained model, apply the reduction to a source link::Classes/FluidDataSet:: and write to a destination. Can be the same for both (in-place)
ARGUMENT:: sourceDataSet
Source data, or the DataSet name
ARGUMENT:: destDataSet
Destination data, or the DataSet name
ARGUMENT:: action
Run when done.
METHOD:: fitTransform
Fit the model to a link::Classes/FluidDataSet:: and write the new projected data to a destination FluidDataSet.
ARGUMENT:: sourceDataSet
@ -52,8 +66,8 @@ code::
~reduced = FluidDataSet(s);
~normalized = FluidDataSet(s);
~standardizer = FluidStandardize(s);
~normalizer = FluidNormalize(s);
~umap = FluidUMAP(s).numDimensions_(2).numNeighbours_(5).minDist_(0.2).iterations_(50). learnRate_(0.2).batchSize_(50);
~normalizer = FluidNormalize(s, 0.05, 0.95);
~umap = FluidUMAP(s).numDimensions_(2).numNeighbours_(5).minDist_(0.2).iterations_(50).learnRate_(0.2);
)
@ -78,22 +92,156 @@ code::
//Visualise the 2D projection of our original 4D data
(
w = Window("scatter", Rect(128, 64, 200, 200));
w = Window("a perspective", Rect(128, 64, 200, 200));
w.drawFunc = {
Pen.use {
~normalizedDict.keysValuesDo{|key, val|
Pen.use {
~normalizedDict.keysValuesDo{|key, val|
Pen.fillColor = Color.new(~colours[key.asSymbol][0], ~colours[key.asSymbol][1],~colours[key.asSymbol][2]);
Pen.fillOval(Rect((val[0] * 200), (val[1] * 200), 5, 5));
~colours[key.asSymbol].flat.postln;
}
}
~colours[key.asSymbol].flat;
}
}
};
w.refresh;
w.front;
)
//play with parameters
~umap.numNeighbours = 10;
~umap.minDist =5;
~umap.batchSize = 10;
~umap.numNeighbours_(10).minDist_(0.5).iterations_(100).learnRate_(0.1);
//rerun the UMAP
~umap.fitTransform(~standardized,~reduced,action:{"Finished UMAP".postln});
//draw to compare
(
// Normalize the re-projected data, dump it, and scatter-plot each point,
// coloured by the original 3D value stored in ~colours for that entry.
~normalizer.fitTransform(~reduced,~normalized,action:{
"Normalized Output".postln;
~normalized.dump{|x|
~normalizedDict = x["data"];
// GUI calls must run on the AppClock — hence the deferred function
{
u = Window("another perspective", Rect(328, 64, 200, 200));
u.drawFunc = {
Pen.use {
~normalizedDict.keysValuesDo{|key, val|
Pen.fillColor = Color.new(~colours[key.asSymbol][0], ~colours[key.asSymbol][1],~colours[key.asSymbol][2]);
Pen.fillOval(Rect((val[0] * 200), (val[1] * 200), 5, 5));
~colours[key.asSymbol].flat;
};
};
};
u.refresh;
u.front;
}.defer;
};
});
)
// now run new random points on the same training material. Colours should be scattered around the same space
~newDS = FluidDataSet(s);
~colours2 = Dictionary.newFrom(400.collect{|i|[("entry"++i).asSymbol, 3.collect{1.0.rand}]}.flatten(1));
~newDS.load(Dictionary.newFrom([\cols, 3, \data, ~colours2]));
//we need to standardize to the same space
~newDSstan = FluidDataSet(s);
~standardizer.transform(~newDS, ~newDSstan);
//then we can run the umap
~newDSmap = FluidDataSet(s);
~umap.transform(~newDSstan, ~newDSmap);
//then we can draw and look
(
// Normalize the projection of the NEW dataset and draw it, coloured from
// ~colours2 (the second random colour dictionary).
~normalizer.transform(~newDSmap,~normalized,action:{
"Normalized Output".postln;
~normalized.dump{|x|
~normalizedDict = x["data"];
// GUI calls must run on the AppClock — hence the deferred function
{
t = Window("new material", Rect(528, 64, 200, 200));
t.drawFunc = {
Pen.use {
~normalizedDict.keysValuesDo{|key, val|
Pen.fillColor = Color.new(~colours2[key.asSymbol][0], ~colours2[key.asSymbol][1],~colours2[key.asSymbol][2]);
Pen.fillOval(Rect((val[0] * 200), (val[1] * 200), 5, 5));
~colours2[key.asSymbol].flat;
};
};
};
t.refresh;
t.front;
}.defer;
};
});
)
//if we process the original dataset, we will see small differences in positions
~reduced2 = FluidDataSet(s);
~umap.transform(~standardized, ~reduced2, action: {\done.postln;});
//then we can draw and look
(
// Normalize the re-transformed ORIGINAL dataset and draw it for comparison
// against the fitTransform result, coloured from ~colours.
~normalizer.transform(~reduced2,~normalized,action:{
"Normalized Output".postln;
~normalized.dump{|x|
~normalizedDict = x["data"];
// GUI calls must run on the AppClock — hence the deferred function
{
z = Window("old material", Rect(728, 64, 200, 200));
z.drawFunc = {
Pen.use {
~normalizedDict.keysValuesDo{|key, val|
Pen.fillColor = Color.new(~colours[key.asSymbol][0], ~colours[key.asSymbol][1],~colours[key.asSymbol][2]);
Pen.fillOval(Rect((val[0] * 200), (val[1] * 200), 5, 5));
~colours[key.asSymbol].flat;
};
};
};
z.refresh;
z.front;
}.defer;
};
});
)
//This is because fitTransform guarantees that the data it transforms is exactly the data used to fit the model, which allows for more accurate distance measurement.
//to check, let's retrieve a single point and predict its position
(
// Scratch buffers: the 3D source point and the result of each processing stage.
~sourcePoint = Buffer(s);
~original = Buffer(s);
~standed = Buffer(s);
~umaped = Buffer(s);
)
//retrieve the 3D original
~raw.getPoint("entry49",~sourcePoint)
//retrieve the fitTransformed point as the most accurate point
~reduced.getPoint("entry49",~original, {~original.getn(0,2,{|x|x.postln})})
//retrieve the transformed point, via the standardizer
~standardizer.transformPoint(~sourcePoint,~standed);
~umap.transformPoint(~standed, ~umaped, {~umaped.getn(0,2,{|x|x.postln})})
// one can also retrieve in control rate with Server Side Queries
// Let's map our learned UMAP dimensions to the controls of a processor
(
// Feed a random 3D control-rate point through the fitted standardizer, then
// through the trained UMAP, polling each stage's values once per second.
{
var trig = Impulse.kr(1);
// WhiteNoise produces a new 3-channel value every control block; trig (1 Hz)
// cues the queries and polls below
var point = WhiteNoise.kr(1.dup(3));
var inputPoint = LocalBuf(3);
var standPoint = LocalBuf(3);
var outputPoint = LocalBuf(2);
var cue1, cue2;
Poll.kr(trig, point, [\pointX,\pointY,\pointZ]);
// write the three channels into the input buffer, one frame index each
point.collect{ |p,i| BufWr.kr([p],inputPoint,i)};
// chain the queries: each stage's output trigger cues the next stage/poll
cue1 = ~standardizer.kr(trig,inputPoint,standPoint);
Poll.kr(cue1,BufRd.kr(1,standPoint,(0..2),interpolation:0),[\stdX,\stdY, \stdZ]);
cue2 = ~umap.kr(cue1, standPoint, outputPoint);
Poll.kr(cue2,BufRd.kr(1,outputPoint,[0,1],interpolation:0),[\newDimA,\newDimB]);
// silent output: this synth exists only for its server-side queries
Silent.ar;
}.play;
)
::

@ -36,6 +36,8 @@ link::Classes/FluidNormalize::
link::Classes/FluidStandardize::
link::Classes/FluidRobustScale::
section:: Dimension Reduction
Compress data to fewer dimensions for visualisation / efficiency / preprocessing

@ -9,6 +9,7 @@
#include <clients/nrt/KNNClassifierClient.hpp>
#include <clients/nrt/KNNRegressorClient.hpp>
#include <clients/nrt/NormalizeClient.hpp>
#include <clients/nrt/RobustScaleClient.hpp>
#include <clients/nrt/StandardizeClient.hpp>
#include <clients/nrt/PCAClient.hpp>
#include <clients/nrt/MDSClient.hpp>
@ -33,6 +34,7 @@ PluginLoad(FluidSTFTUGen)
makeSCWrapper<RTKNNClassifierClient>("FluidKNNClassifier",ft);
makeSCWrapper<RTKNNRegressorClient>("FluidKNNRegressor",ft);
makeSCWrapper<RTNormalizeClient>("FluidNormalize",ft);
makeSCWrapper<RTRobustScaleClient>("FluidRobustScale",ft);
makeSCWrapper<RTStandardizeClient>("FluidStandardize",ft);
makeSCWrapper<RTPCAClient>("FluidPCA",ft);
makeSCWrapper<NRTThreadedMDSClient>("FluidMDS",ft);

Loading…
Cancel
Save