Merged clients/inter_client_comms into fix/manip_client_new
commit
2402b71caa
@ -1,40 +1,67 @@
|
||||
// Extension methods for FluidManipulationClient: dump/load the object's
// contents as a Dictionary, round-tripped through a temporary JSON file
// that the server writes/reads.
+ FluidManipulationClient {

	// Build a unique temp-file path for one JSON round-trip.
	// UniqueID.next guards against two calls landing on the same timestamp.
	tmpJSONFilename {
		^Platform.defaultTempDir ++ "tmp_fluid_data_"
			++ Date.localtime.stamp ++ "_" ++ UniqueID.next ++ ".json";
	}

	// Ask the server to write this object's state to a temp JSON file,
	// parse that file back into a Dictionary, hand it to `action`, then
	// delete the temp file.
	// NOTE(review): the default `postit` is not defined in this file —
	// confirm it resolves at class-library scope (presumably a posting func).
	dump { |action|
		var filename = this.tmpJSONFilename;
		action ?? { action = postit };
		this.write(filename, {
			action.value(this.parseJSON(File.readAllString(filename)));
			File.delete(filename);
		});
	}

	// Serialise `dict` to a temp JSON file, ask the server to read it into
	// this object, run the optional completion `action`, then clean up.
	load { |dict, action|
		var filename = this.tmpJSONFilename;
		File.use(filename, "wt", { |f| f.write(this.asJSON(dict)); });
		this.read(filename, {
			action.notNil.if { action.value; };
			File.delete(filename);
		});
	}

	// Recursively normalise the result of `interpret`: Events become
	// Dictionaries (values converted in place), Arrays are mapped
	// element-wise; anything else passes through unchanged.
	toDict { |obj|
		var converted;
		if(obj.class === Event) {
			converted = obj.as(Dictionary);
			// keysValuesChange replaces each value with the function's result
			converted.keysValuesChange { |k, v| this.toDict(v) };
			^converted;
		};
		if(obj.class === Array) {
			converted = obj.collect { |v| this.toDict(v) };
			^converted;
		};
		^obj;
	}

	// Minimal JSON -> Dictionary parser: rewrite `{`/`}` to `(`/`)` when
	// outside string literals so the text becomes a valid SC Event literal,
	// `interpret` it, then normalise via toDict.
	// SECURITY NOTE: `interpret` executes the rewritten input as sclang
	// code — only use on trusted JSON (here: files the server itself wrote).
	parseJSON { |jsonStr|
		var parsed = jsonStr;
		// String-literal state must persist across the whole scan.
		// (Previously declared inside the loop, so it reset every character
		// and braces inside string values were wrongly rewritten.)
		var inString = false;
		jsonStr.do({ |char, pos|
			char.switch(
				// Toggle on quotes, except an escaped quote inside a string.
				$", { (jsonStr[pos - 1] == $\\ && inString).not.if({ inString = inString.not }) },
				${, { if(inString.not) { parsed[pos] = $( } },
				$}, { if(inString.not) { parsed[pos] = $) } }
			)
		});
		^this.toDict(parsed.interpret);
	}

	// Serialise a value to JSON text.
	// Numbers pass through; Strings are quoted via asCompileString;
	// Dictionaries become objects (keys stringified and quoted);
	// sequenceable collections become arrays.
	// NOTE(review): Symbols/booleans are not special-cased — confirm all
	// callers pre-convert them (the fallback returns the value unchanged).
	asJSON { |d|
		if(d.isNumber) { ^d };
		if(d.isString) { ^d.asCompileString };
		if(d.isKindOf(Dictionary)) {
			^"{" ++ (
				d.keys.asList.collect { |k|
					k.asString.asCompileString ++ ":" + this.asJSON(d[k])
				}
			).join(", ") ++ "}"
		};
		if(d.isKindOf(SequenceableCollection)) {
			^"[" ++ d.collect({ |x| this.asJSON(x) }).join(", ") ++ "]";
		};
		// Fallback: return the value itself (without this, the method
		// implicitly returned the receiver for unhandled types).
		^d;
	}
}
|
||||
|
||||
@ -0,0 +1,45 @@
|
||||
// --- Demo: build a dataset Dictionary and load it into FluCoMa objects ---
// Evaluate line by line (requires a running server `s`).

// create the data dictionary: 7 entries of 10 values each
~data = Dictionary.new
7.do{|i| ~data.add(("entry-"++i).asSymbol -> 10.collect{|j|j/10 + i})}

// nest that dictionary in the dataset format, adding the number of columns
~dict = Dictionary.new
~dict.add(\data -> ~data)
~dict.add(\cols -> 10)

// create a dataset, then load the dictionary into it
~ds = FluidDataSet.new(s,\simple1data);
~ds.load(~dict)
~ds.print

// fun with kdtree to see it actually works
~kdtree = FluidKDTree.new(s)
~kdtree.fit(~ds,{\done.postln;})

// query the 5 nearest neighbours of a constant-valued point
~target = Buffer.loadCollection(s,(4).dup(10));
~kdtree.kNearest(~target,5,{|a|a.postln;})
~kdtree.kNearestDist(~target,5,{|a|a.postln;})


/////////////////////////////////////////////
// creating a labelset the same way

// creating the data dictionary (one label per entry)
~data2 = Dictionary.new
7.do{|i| ~data2.add(("entry-"++i).asSymbol -> (if( i.odd, {["odd"]},{["even"]})))}

// nesting again
~dict2 = Dictionary.new
~dict2.add(\data -> ~data2)
~dict2.add(\cols -> 1)

// creating a labelset and loading the dictionary
~ls = FluidLabelSet.new(s,\simplelabel);
~ls.load(~dict2)
~ls.print

// testing with a classifier toy example
~classifier = FluidKNNClassifier.new(s);
~classifier.fit(~ds,~ls, {\done.postln;})

~classifier.predictPoint(~target,2,action: {|x|x.postln;})
|
||||
Loading…
Reference in New Issue