output layer length forgotten after JSON round-trip
Hello,
when saving and loading an nn object with more than one hidden layer, the length of the output layer is forgotten.
I wrote some mocha tests that prove it:
var nn = require('nn')
var assert = require('assert')

it('trains 2 in 2 out with 1 hidden', function (done) {
  var net = nn()

  // this example should work
  net.train([
    { input: [ 0, 0.1 ], output: [ -0.1, 0.1 ] },
    { input: [ 0.1, 0.1 ], output: [ 0, 0.2 ] },
    { input: [ 0.2, 0.2 ], output: [ 0, 0.4 ] },
    { input: [ 0.3, -0.2 ], output: [ 0.5, 0.1 ] },
    { input: [ 0.4, 0.3 ], output: [ 0.1, 0.7 ] },
    { input: [ 0.5, -0.3 ], output: [ 0.8, 0.2 ] },
    { input: [ 0.6, 0.4 ], output: [ 0.2, 1 ] }
  ])

  var output = net.send([ 0.5, 0.5 ]) // => [ 0, 1 ]
  console.log('trained output for [0.5, 0.5]: [%s]. desiredOutput: [0, 1]', output)

  // round-trip through JSON and query the restored net
  var json = net.toJson()
  net = nn()
  net.fromJson(json)

  output = net.send([ 0.5, 0.5 ]) // => [ 0, 1 ]
  console.log('restored output for [0.5, 0.5]: [%s]. desiredOutput: [0, 1]', output)

  assert(output.length === 2)
  done()
})
it('trains 2 in 2 out with 2 hidden', function (done) {
  var net = nn({
    layers: [ 5, 4 ],
    iterations: 2
  })

  // this example should work
  net.train([
    { input: [ 0, 0.1 ], output: [ -0.1, 0.1 ] },
    { input: [ 0.1, 0.1 ], output: [ 0, 0.2 ] },
    { input: [ 0.2, 0.2 ], output: [ 0, 0.4 ] },
    { input: [ 0.3, -0.2 ], output: [ 0.5, 0.1 ] },
    { input: [ 0.4, 0.3 ], output: [ 0.1, 0.7 ] },
    { input: [ 0.5, -0.3 ], output: [ 0.8, 0.2 ] },
    { input: [ 0.6, 0.4 ], output: [ 0.2, 1 ] }
  ])

  var output = net.send([ 0.5, 0.5 ]) // => [ 0, 1 ]
  console.log('trained output for [0.5, 0.5]: [%s]. desiredOutput: [0, 1]', output)

  // round-trip through JSON into a fresh net with no layer config
  var json = net.toJson()
  net = nn()
  net.fromJson(json)

  output = net.send([ 0.5, 0.5 ]) // => [ 0, 1 ]
  console.log('restored output for [0.5, 0.5]: [%s]. desiredOutput: [0, 1]', output)

  assert(output.length === 2)
  done()
})
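A stricter version of these checks would compare the restored net's output element-wise against the original's, not just its length. A minimal sketch using Node's built-in assert module (the helper name assertRoundTrip is mine, not part of nn):

var assert = require('assert')

// Compares a trained net against its JSON round-trip on one input.
// `makeNet` is the nn factory; `trainedNet` is an already-trained net.
function assertRoundTrip (makeNet, trainedNet, input) {
  var before = trainedNet.send(input)

  var restored = makeNet()
  restored.fromJson(trainedNet.toJson())
  var after = restored.send(input)

  // The restored net should reproduce the original output exactly.
  assert.deepEqual(after, before)
}

Inside either test it would be called as assertRoundTrip(nn, net, [ 0.5, 0.5 ]), and it fails as long as the layer count is lost.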
good work by the way :)
The number of layers is not correctly set after reading the JSON: it is always 3, so send returns the result of the third layer. Will correct this in a PR.
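For anyone hitting this before the PR lands, the fix presumably amounts to deriving the layer count from the deserialized data inside fromJson instead of keeping the constructor's default. A rough sketch; the field names weights and layers are guesses about nn's internals, not its documented API:

// Hypothetical sketch of the fromJson fix. The real serialized shape
// of an nn object may differ from what is assumed here.
function fromJson (json) {
  var parsed = JSON.parse(json)
  this.weights = parsed.weights

  // Derive the layer count from what was actually deserialized,
  // instead of keeping the constructor's default of 3 layers, so
  // send() reads its result from the true last (output) layer.
  this.layers = this.weights.length
  return this
}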