Neural network in Javascript not learning properly

I've tried to rewrite the neural network found here in JavaScript. My JavaScript code looks like this:

function NeuralFactor(weight) {
    var self = this;
    this.weight = weight;
    this.delta = 0;
}

function Sigmoid(value) {
    return 1 / (1 + Math.exp(-value));
}

function Neuron(isInput) {
    var self = this;
    this.pulse = function() {
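        // Weighted sum of all incoming signals plus the bias, passed through the sigmoid.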
        self.output = 0;
        self.input.forEach(function(item) {
            self.output += item.signal.output * item.factor.weight;
        });

        self.output += self.bias.weight;
        self.output = Sigmoid(self.output);
    };

    this.bias = new NeuralFactor(isInput ? 0 : Math.random());
    this.error = 0;
    this.input = [];
    this.output = 0;

    this.findInput = function(signal) {
        var input = self.input.filter(function(input) {
            return signal == input.signal;
        })[0];
        return input;
    };
}

function NeuralLayer() {
    var self = this;
    this.pulse = function() {
        self.neurons.forEach(function(neuron) {
            neuron.pulse();
        });
    };
    this.neurons = [];
    this.train = function(learningRate) {
        self.neurons.forEach(function(neuron) {
            neuron.bias.weight += neuron.bias.delta * learningRate;
            neuron.bias.delta = 0;
            neuron.input.forEach(function(input) {
                input.factor.weight += input.factor.delta * learningRate;
                input.factor.delta = 0;
            })
        })
    }
}

function NeuralNet(inputCount, hiddenCount, outputCount) {
    var self = this;
    this.inputLayer = new NeuralLayer();
    this.hiddenLayer = new NeuralLayer();
    this.outputLayer = new NeuralLayer();
    this.learningRate = 0.5;

    for(var i = 0; i < inputCount; i++)
        self.inputLayer.neurons.push(new Neuron(true));

    for(var i = 0; i < hiddenCount; i++)
        self.hiddenLayer.neurons.push(new Neuron());

    for(var i = 0; i < outputCount; i++)
        self.outputLayer.neurons.push(new Neuron());

    for (var i = 0; i < hiddenCount; i++)
        for (var j = 0; j < inputCount; j++)
            self.hiddenLayer.neurons[i].input.push({
                signal: self.inputLayer.neurons[j],
                factor: new NeuralFactor(Math.random())
            });

    for (var i = 0; i < outputCount; i++)
        for (var j = 0; j < hiddenCount; j++)
            self.outputLayer.neurons[i].input.push({
                signal: self.hiddenLayer.neurons[j],
                factor: new NeuralFactor(Math.random())
            });

    this.pulse = function() {
        self.hiddenLayer.pulse();
        self.outputLayer.pulse();
    };

    this.backPropagation = function(desiredResults) {
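        // Output layer: error = (target - output) * output * (1 - output), i.e. scaled by the sigmoid derivative.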
        for(var i = 0; i < self.outputLayer.neurons.length; i++) {
            var outputNeuron = self.outputLayer.neurons[i];
            var output = outputNeuron.output;
            outputNeuron.error = (desiredResults[i] - output) * output * (1.0 - output);
        }
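        // Hidden layer: sum each output neuron's error through the connecting weight, scaled by this neuron's sigmoid derivative.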
        for(var i = 0; i < self.hiddenLayer.neurons.length; i++) {
            var hiddenNeuron = self.hiddenLayer.neurons[i];
            var error = 0;
            for(var j = 0; j < self.outputLayer.neurons.length; j++) {
                var outputNeuron = self.outputLayer.neurons[j];
                error += outputNeuron.error * outputNeuron.findInput(hiddenNeuron).factor.weight * hiddenNeuron.output * (1.0 - hiddenNeuron.output);
            }
            hiddenNeuron.error = error;
        }
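        // Accumulate weight and bias deltas for the hidden -> output connections.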
        for(var j = 0; j < self.outputLayer.neurons.length; j++) {
            var outputNeuron = self.outputLayer.neurons[j];
            for(var i = 0; i < self.hiddenLayer.neurons.length; i++) {
                var hiddenNeuron = self.hiddenLayer.neurons[i];
                outputNeuron.findInput(hiddenNeuron).factor.delta += outputNeuron.error * hiddenNeuron.output;
            }
            outputNeuron.bias.delta += outputNeuron.error * outputNeuron.bias.weight;
        }
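        // Accumulate weight and bias deltas for the input -> hidden connections.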
        for(var j = 0; j < self.hiddenLayer.neurons.length; j++) {
            var hiddenNeuron = self.hiddenLayer.neurons[j];
            for(var i = 0; i < self.inputLayer.neurons.length; i++) {
                var inputNeuron = self.inputLayer.neurons[i];
                hiddenNeuron.findInput(inputNeuron).factor.delta += hiddenNeuron.error * inputNeuron.output;
            }
            hiddenNeuron.bias.delta += hiddenNeuron.error * hiddenNeuron.bias.weight;
        }
    };
    this.train = function(input, desiredResults) {
        for(var i = 0; i < self.inputLayer.neurons.length; i++) {
            var neuron = self.inputLayer.neurons[i];
            neuron.output = input[i];
        }

        self.pulse();
        self.backPropagation(desiredResults);

        self.hiddenLayer.train(self.learningRate);
        self.outputLayer.train(self.learningRate);
    };

}

Now I'm trying to teach it to solve the XOR problem. I'm training it like this:

var net = new NeuralNet(2,2,1);

var testInputs = [[0,0], [0,1], [1,0], [1,1]];
var testOutputs = [[1],[0],[0],[1]];

for (var i = 0; i < 1000; i++)
    for(var j = 0; j < 4; j++)
        net.train(testInputs[j], testOutputs[j]);

function UseNet(a, b) {
    net.inputLayer.neurons[0].output = a;
    net.inputLayer.neurons[1].output = b;
    net.pulse();

    return net.outputLayer.neurons[0].output;
}

The problem is that all the results I get are close to 0.5 and look pretty random, no matter what arguments I use. For example:

UseNet(0,0) => 0.5107701166677714
UseNet(0,1) => 0.4801498747476413
UseNet(1,0) => 0.5142463167153447
UseNet(1,1) => 0.4881829364416052

What could be wrong with my code?


asked Mar 18, 2014 at 22:21 by Łukasz W.
  • Off-topic: you could define most methods in the prototype instead of in each instance. – Oriol Commented Mar 21, 2014 at 22:12
  • definitely give stackoverflow.com/questions/13998970/… a read-through – Mike 'Pomax' Kamermans Commented Mar 21, 2014 at 23:45
  • Really impressive! Hi, I would really like to see this in action. Do you have this hosted somewhere with the functioning example code and some sort of interface (can be spartan) I don't know how to trigger this to start learning and how do you dump the output to see the results?...Fascinating stuff. – Frank Tudor Commented Mar 25, 2014 at 20:30

3 Answers


This network is big enough for the XOR problem and I can't see any obvious mistakes, so I suspect it's getting stuck in a local minimum.

Try going through the training set 10,000 times instead of 1,000; this gives it a better chance of breaking out of any minima and converging. You can also speed up convergence a lot by upping the number of hidden neurons, tweaking η (the learning rate), or adding momentum. To implement the latter, try using this as your training function:

this.train = function(learningRate) {
    var momentum = 0 /* Some value, probably fairly small. */;
    self.neurons.forEach(function(neuron) {
        neuron.bias.weight += neuron.bias.delta * learningRate;
        neuron.bias.delta = 0;
        neuron.input.forEach(function(input) {
            input.factor.weight += (input.factor.delta * learningRate) + (input.factor.weight * momentum);
            input.factor.delta = 0;
        })
    })
}

I've had good results changing the learning rate to 1.5 (which is pretty high) and momentum to 0.000001 (which is pretty small).

(Incidentally, have you tried running the .NET implementation with a few different seeds? It can take quite a while to converge too!)
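In case it's useful, here's a minimal sketch of what those tweaks look like together, assuming the NeuralNet, testInputs and testOutputs from the question; the hidden-neuron count of 4 and the 10,000 passes are just example values, and you'd combine this with the momentum version of train above if you want the momentum term as well:

var net = new NeuralNet(2, 4, 1);   // a few more hidden neurons than the original 2 (example value)
net.learningRate = 1.5;             // the higher learning rate mentioned above

var testInputs = [[0,0], [0,1], [1,0], [1,1]];
var testOutputs = [[1],[0],[0],[1]];

// 10,000 passes over the training set instead of 1,000.
for (var epoch = 0; epoch < 10000; epoch++)
    for (var j = 0; j < testInputs.length; j++)
        net.train(testInputs[j], testOutputs[j]);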

This system uses fuzzy logic. As the article says, don't use exact integers; use "close" real numbers instead. Try:

UseNet(0.1,0.1) => 
UseNet(0.1,0.9) => 
UseNet(0.9,0.1) => 
UseNet(0.9,0.9) => 

For the results, treat anything above 0.5 as a 1 and anything below as a 0.
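
If you do feed it "close" values like these, a small wrapper can turn the real-valued output back into a crisp 0 or 1 (the helper name is just for illustration; it isn't part of the question's code):

// Illustrative helper: anything above 0.5 counts as 1, the rest as 0.
function UseNetCrisp(a, b) {
    return UseNet(a, b) > 0.5 ? 1 : 0;
}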

Hmmm.

Instead of:

var testInputs = [[0,0], [0,1], [1,0], [1,1]];
var testOutputs = [[1],[0],[0],[1]];

try this:

var testInputs = [[0.05,0.05], [0.05,0.95], [0.95,0.05], [0.95,0.95]];
var testOutputs = [[1],[0],[0],[1]];

or

var testInputs = [[0,0], [0,1], [1,0], [1,1]];
var testOutputs = [[0.95],[0.05],[0.05],[0.95]];
