MLPregressor: corrected the helpfiles and toy example for the new parameter in the list

nix
Pierre Alexandre Tremblay 6 years ago
parent bcb4456f79
commit efab1d08b2

@ -4,7 +4,7 @@ var trained = 0, entering = 0;
var va = Array.fill(10,{0.5});
var input = Buffer.alloc(s,2);
var output = Buffer.alloc(s,10);
var mlp = FluidMLPRegressor(s,[6],1,1000,0.1,0,1,0);
var mlp = FluidMLPRegressor(s,[6],1,0,1000,0.1,0,1,0);
var entry = 0;
~inData = FluidDataSet(s,\indata);
@ -31,7 +31,7 @@ f.action = {arg x,y; //if trained, predict the point f.x f.y
}, { //if not entering a point
if (trained == 1, { //if trained
input.setn(0, [f.x, f.y]);
mlp.predictPoint(input,output,0,{
mlp.predictPoint(input,output,{
output.getn(0,10,{
|x|va = x; b.set(\val, va); {a.value = va;}.defer;});
});

@ -26,7 +26,7 @@ Routine{
~mlpHelpTarget.print
// make an MLPregressor
~mlp = FluidMLPRegressor(s,[3],FluidMLPRegressor.sigmoid,1000,0.1,0.1,1,0);//1000 epoch at a time
~mlp = FluidMLPRegressor(s, [3], FluidMLPRegressor.sigmoid, 0, 1000,0.1,0.1,1,0);//1000 epoch at a time
//train on it and observe the error
~mlp.fit(~mlpHelpSource,~mlpHelpTarget,{|x|x.postln;});

@ -20,6 +20,9 @@ An link::Classes/Array:: that gives the sizes of any hidden layers in the networ
ARGUMENT:: activation
The activation function to use for the hidden layer units.
ARGUMENT:: outputLayer
The layer whose output to return. Layers are counted backwards from the output: the default of 0 is the output layer itself, 1 is the last hidden layer, and so on.
ARGUMENT:: maxIter
The maximum number of iterations to use in training.
@ -65,9 +68,6 @@ Input data
ARGUMENT:: targetDataSet
Output data
ARGUMENT:: layer
Layer whose output to return.
ARGUMENT:: action
Function to run when complete
@ -80,9 +80,6 @@ Input point
ARGUMENT:: targetBuffer
Output point
ARGUMENT:: layer
Layer whose output to return.
ARGUMENT:: action
A function to run when complete
@ -103,7 +100,7 @@ code::
~test = FluidDataSet(s,\mlp_regressor_dest);
~output = FluidDataSet(s,\mlp_regress_out);
~tmpbuf = Buffer.alloc(s,1);
~regressor = FluidMLPRegressor(s,[2],FluidMLPRegressor.tanh,1000,0.1,0.1,1,0);
~regressor = FluidMLPRegressor(s,[2], FluidMLPRegressor.tanh, 0, 1000,0.1,0.1,1,0);
)
//Make source, target and test data

Loading…
Cancel
Save