From 4e1063b0dafa635dd18512c7d863b357aa05e0c3 Mon Sep 17 00:00:00 2001 From: cheminfo bot Date: Thu, 9 Jun 2016 14:45:13 +0000 Subject: [PATCH] Release v0.12.0 --- bower.json | 2 +- dist/ml.js | 425 +++++++++++++++++++++++++-------------------------- package.json | 2 +- 3 files changed, 212 insertions(+), 217 deletions(-) diff --git a/bower.json b/bower.json index 079552b..623531b 100644 --- a/bower.json +++ b/bower.json @@ -1,6 +1,6 @@ { "name": "ml", - "version": "0.11.2", + "version": "0.12.0", "main": [ "dist/ml.js", "dist/ml.min.js" diff --git a/dist/ml.js b/dist/ml.js index d92ceea..7eb63ae 100644 --- a/dist/ml.js +++ b/dist/ml.js @@ -22123,179 +22123,172 @@ return /******/ (function(modules) { // webpackBootstrap var Layer = __webpack_require__(158); var Matrix = __webpack_require__(14); - module.exports = FeedforwardNeuralNetwork; - - /** - * Function that returns a random number between two numbers (inclusive) - * @param {number} min - lower bound - * @param {number} max - upper bound. - * @returns {number} random number - */ - function randomIntegerFromInterval(min, max) { - return Math.floor(Math.random()*(max - min + 1) + min); - } - - /** - * Constructor for the FNN (Feedforward Neural Networks) that takes an Array of Numbers, - * those numbers corresponds to the size of each layer in the FNN, the first and the last number of the array corresponds to the input and the - * output layer respectively. - * - * @param reload - for load purposes. - * @param model - for load purposes. - * @constructor - */ - function FeedforwardNeuralNetwork(reload, model) { - if(reload) { - this.layers = model.layers; - this.inputSize = model.inputSize; - this.outputSize = model.outputSize; + class FeedforwardNeuralNetwork { + /** + * Constructor for the FNN (Feedforward Neural Networks) that takes an Array of Numbers, + * those numbers corresponds to the size of each layer in the FNN, the first and the last number of the array corresponds to the input and the + * output layer respectively. + * + * @constructor + */ + constructor(X, Y) { + if (X === true) { + const model = Y; + this.layers = model.layers; + this.inputSize = model.inputSize; + this.outputSize = model.outputSize; + } else { + if (X.length !== Y.length) + throw new RangeError("X and Y must have the same size."); + this.X = X; + this.Y = Y; + this.inputSize = X[0].length; + this.outputSize = Y[0].length; + } } - } - /** - * Build the Neural Network with an array that represent each hidden layer size. - * - * @param {Array} layersSize - Array of sizes of each layer. - */ - FeedforwardNeuralNetwork.prototype.buildNetwork = function (layersSize) { - layersSize.push(this.outputSize); + /** + * Build the Neural Network with an array that represent each hidden layer size. + * + * @param {Array} layersSize - Array of sizes of each layer. + */ + buildNetwork(layersSize) { + layersSize.push(this.outputSize); - this.layers = new Array(layersSize.length); + this.layers = new Array(layersSize.length); - for (var i = 0; i < layersSize.length; ++i) { - var inSize = (i == 0) ? this.inputSize : layersSize[i - 1]; - this.layers[i] = new Layer(inSize, layersSize[i]); + for (var i = 0; i < layersSize.length; ++i) { + var inSize = (i == 0) ? 
this.inputSize : layersSize[i - 1]; + this.layers[i] = new Layer(inSize, layersSize[i]); + } + + this.layers[this.layers.length - 1].isSigmoid = false; } - this.layers[this.layers.length - 1].isSigmoid = false; - }; + /** + * Function that applies a forward propagation over the Neural Network + * with one case of the dataset. + * @param {Array} input - case of the dataset. + * @returns {Array} result of the forward propagation. + */ + forwardNN(input) { + var results = input.slice(); - /** - * Function that applies a forward propagation over the Neural Network - * with one case of the dataset. - * @param {Array} input - case of the dataset. - * @returns {Array} result of the forward propagation. - */ - FeedforwardNeuralNetwork.prototype.forwardNN = function (input) { - var results = input.slice(); + for (var i = 0; i < this.layers.length; ++i) { + results = this.layers[i].forward(results); + } - for(var i = 0; i < this.layers.length; ++i) { - results = this.layers[i].forward(results); + return results; } - return results; - }; - - /** - * Function that makes one iteration (epoch) over the Neural Network with one element - * of the dataset with corresponding prediction; the other two arguments are the - * learning rate and the momentum that is the regularization term for the parameters - * of each perceptron in the Neural Network. - * @param {Array} data - Element of the dataset. - * @param {Array} prediction - Prediction over the data object. - * @param {Number} learningRate - * @param momentum - the regularization term. - */ - FeedforwardNeuralNetwork.prototype.iteration = function (data, prediction, learningRate, momentum) { - var forwardResult = this.forwardNN(data); - var error = new Array(forwardResult.length); + /** + * Function that makes one iteration (epoch) over the Neural Network with one element + * of the dataset with corresponding prediction; the other two arguments are the + * learning rate and the momentum that is the regularization term for the parameters + * of each perceptron in the Neural Network. + * @param {Array} data - Element of the dataset. + * @param {Array} prediction - Prediction over the data object. + * @param {Number} learningRate + * @param momentum - the regularization term. + */ + iteration(data, prediction, learningRate, momentum) { + var forwardResult = this.forwardNN(data); + var error = new Array(forwardResult.length); - if(typeof(prediction) === 'number') - prediction = [prediction]; + if (typeof(prediction) === 'number') + prediction = [prediction]; - for (var i = 0; i < error.length; i++) { - error[i] = prediction[i] - forwardResult[i]; - } + for (var i = 0; i < error.length; i++) { + error[i] = prediction[i] - forwardResult[i]; + } - var lengthLayers = this.layers.length; + var lengthLayers = this.layers.length; - for(i = 0; i < lengthLayers; ++i) { - error = this.layers[lengthLayers - 1 - i].train(error, learningRate, momentum); + for (i = 0; i < lengthLayers; ++i) { + error = this.layers[lengthLayers - 1 - i].train(error, learningRate, momentum); + } } - }; - /** - * Method that train the neural network with a given training set with corresponding - * predictions. The options argument has an array of the number of perceptrons that we want in each hidden layer, the - * number of iterations (default 50) that we want to perform, the learning rate and the momentum that is the - * regularization term (default 0.1 for both) for the parameters of each perceptron in the Neural Network. 
- * - * options: - * * hiddenLayers - Array of number with each hidden layer size. - * * iterations - Number - * * learningRate - Number - * * momentum - Number - * - * @param {Matrix} trainingSet - * @param {Matrix} predictions - * @param {Number} options - */ - FeedforwardNeuralNetwork.prototype.train = function (trainingSet, predictions, options) { - if(options === undefined) options = {}; - - if(trainingSet.length !== predictions.length) - throw new RangeError("the training and prediction set must have the same size."); + /** + * Method that train the neural network with a given training set with corresponding + * predictions. The options argument has an array of the number of perceptrons that we want in each hidden layer, the + * number of iterations (default 50) that we want to perform, the learning rate and the momentum that is the + * regularization term (default 0.1 for both) for the parameters of each perceptron in the Neural Network. + * + * options: + * * hiddenLayers - Array of number with each hidden layer size. + * * iterations - Number + * * learningRate - Number + * * momentum - Number + * + * @param {object} options + */ + train(options) { + if (options === undefined) options = {}; - this.inputSize = trainingSet[0].length; - this.outputSize = predictions[0].length; + const trainingSet = this.X; + const predictions = this.Y; - var hiddenLayers = options.hiddenLayers === undefined ? [10] : options.hiddenLayers; - var iterations = options.iterations === undefined ? 50 : options.iterations; - var learningRate = options.learningRate === undefined ? 0.1 : options.learningRate; - var momentum = options.momentum === undefined ? 0.1 : options.momentum; + var hiddenLayers = options.hiddenLayers === undefined ? [10] : options.hiddenLayers; + var iterations = options.iterations === undefined ? 50 : options.iterations; + var learningRate = options.learningRate === undefined ? 0.1 : options.learningRate; + var momentum = options.momentum === undefined ? 0.1 : options.momentum; - this.buildNetwork(options.hiddenLayers); + this.buildNetwork(hiddenLayers); - for(var i = 0; i < iterations; ++i) { - for(var j = 0; j < predictions.length; ++j) { - var index = randomIntegerFromInterval(0, predictions.length - 1); - this.iteration(trainingSet[index], predictions[index], learningRate, momentum); + for (var i = 0; i < iterations; ++i) { + for (var j = 0; j < predictions.length; ++j) { + var index = randomIntegerFromInterval(0, predictions.length - 1); + this.iteration(trainingSet[index], predictions[index], learningRate, momentum); + } } } - }; - /** - * Function that with a dataset, gives all the predictions for this dataset. - * @param {Matrix} dataset. - * @returns {Array} predictions - */ - FeedforwardNeuralNetwork.prototype.predict = function (dataset) { - if(dataset[0].length !== this.inputSize) - throw new RangeError("The dataset columns must have the same size of the " + - "input layer"); - var result = new Array(dataset.length); - for (var i = 0; i < dataset.length; i++) { - result[i] = this.forwardNN(dataset[i]); + /** + * Function that with a dataset, gives all the predictions for this dataset. + * @param {Matrix} dataset. 
+ * @returns {Array} predictions + */ + predict(dataset) { + if (dataset[0].length !== this.inputSize) + throw new RangeError("The dataset columns must have the same size of the " + + "input layer"); + var result = new Array(dataset.length); + for (var i = 0; i < dataset.length; i++) { + result[i] = this.forwardNN(dataset[i]); + } + + result = new Matrix(result); + return result.columns === 1 ? result.getColumn(0) : result; } - result = new Matrix(result); - return result.columns === 1 ? result.getColumn(0) : result; - }; + toJSON() { + return { + name: 'FNN', + layers: this.layers, + inputSize: this.inputSize, + outputSize: this.outputSize + }; + } - /** - * function that loads a object model into the Neural Network. - * @param model - * @returns {FeedforwardNeuralNetwork} with the provided model. - */ - FeedforwardNeuralNetwork.load = function (model) { - if(model.modelName !== "FNN") - throw new RangeError("The given model is invalid!"); + static load(model) { + if (model.name !== 'FNN') + throw new RangeError('Invalid model: ' + model.name); + return new FeedforwardNeuralNetwork(true, model); + } + } - return new FeedforwardNeuralNetwork(true, model); - }; + module.exports = FeedforwardNeuralNetwork; /** - * Function that exports the actual Neural Network to an object. - * @returns {{modelName: string, layers: *, inputSize: *, outputSize: *}} + * Function that returns a random number between two numbers (inclusive) + * @param {number} min - lower bound + * @param {number} max - upper bound. + * @returns {number} random number */ - FeedforwardNeuralNetwork.prototype.export = function () { - return { - modelName: "FNN", - layers: this.layers, - inputSize: this.inputSize, - outputSize: this.outputSize - }; - }; + function randomIntegerFromInterval(min, max) { + return Math.floor(Math.random() * (max - min + 1) + min); + } /***/ }, @@ -22306,6 +22299,79 @@ return /******/ (function(modules) { // webpackBootstrap var Matrix = __webpack_require__(14); + class Layer { + /** + * Constructor that creates a layer for the neural network given the number of inputs + * and outputs. + * @param inputSize + * @param outputSize + * @constructor + */ + constructor(inputSize, outputSize) { + this.output = Matrix.zeros(1, outputSize).getRow(0); + this.input = Matrix.zeros(1, inputSize + 1).getRow(0); //+1 for bias term + this.deltaWeights = Matrix.zeros(1, (1 + inputSize) * outputSize).getRow(0); + this.weights = randomInitializeWeights(this.deltaWeights.length, inputSize, outputSize); + this.isSigmoid = true; + } + + /** + * Function that performs the forward propagation for the current layer + * @param {Array} input - output from the previous layer. + * @returns {Array} output - output for the next layer. + */ + forward(input) { + this.input = input.slice(); + this.input.push(1); // bias + var offs = 0; // offset used to get the current weights in the current perceptron + this.output = Matrix.zeros(1, this.output.length).getRow(0); + + for (var i = 0; i < this.output.length; ++i) { + for (var j = 0; j < this.input.length; ++j) { + this.output[i] += this.weights[offs + j] * this.input[j]; + } + if (this.isSigmoid) + this.output[i] = sigmoid(this.output[i]); + + offs += this.input.length; + } + + return this.output.slice(); + } + + /** + * Function that performs the backpropagation algorithm for the current layer. + * @param {Array} error - errors from the previous layer. + * @param {Number} learningRate - Learning rate for the actual layer. + * @param {Number} momentum - The regularizarion term. 
+ * @returns {Array} the error for the next layer. + */ + train(error, learningRate, momentum) { + var offs = 0; + var nextError = Matrix.zeros(1, this.input.length).getRow(0);//new Array(this.input.length); + + for (var i = 0; i < this.output.length; ++i) { + var delta = error[i]; + + if (this.isSigmoid) + delta *= sigmoidGradient(this.output[i]); + + for (var j = 0; j < this.input.length; ++j) { + var index = offs + j; + nextError[j] += this.weights[index] * delta; + + var deltaWeight = this.input[j] * delta * learningRate; + this.weights[index] += this.deltaWeights[index] * momentum + deltaWeight; + this.deltaWeights[index] = deltaWeight; + } + + offs += this.input.length; + } + + return nextError; + } + } + module.exports = Layer; /** @@ -22322,7 +22388,7 @@ return /******/ (function(modules) { // webpackBootstrap * @param outputSize - number of output of the current layer * @returns {Array} random array of numbers. */ - function randomInitialzeWeights(numberOfWeights, inputSize, outputSize) { + function randomInitializeWeights(numberOfWeights, inputSize, outputSize) { var epsilon = 2.449489742783 / Math.sqrt(inputSize + outputSize); return Matrix.rand(1, numberOfWeights).mul(2 * epsilon).sub(epsilon).getRow(0); } @@ -22345,77 +22411,6 @@ return /******/ (function(modules) { // webpackBootstrap return value * (1 - value); } - /** - * Constructor that creates a layer for the neural network given the number of inputs - * and outputs. - * @param inputSize - * @param outputSize - * @constructor - */ - function Layer(inputSize, outputSize) { - this.output = Matrix.zeros(1, outputSize).getRow(0); - this.input = Matrix.zeros(1, inputSize + 1).getRow(0); //+1 for bias term - this.deltaWeights = Matrix.zeros(1, (1 + inputSize) * outputSize).getRow(0); - this.weights = randomInitialzeWeights(this.deltaWeights.length, inputSize, outputSize); - this.isSigmoid = true; - } - - /** - * Function that performs the forward propagation for the current layer - * @param {Array} input - output from the previous layer. - * @returns {Array} output - output for the next layer. - */ - Layer.prototype.forward = function (input) { - this.input = input.slice(); - this.input.push(1); // bias - var offs = 0; // offset used to get the current weights in the current perceptron - this.output = Matrix.zeros(1, this.output.length).getRow(0); - - for(var i = 0; i < this.output.length; ++i) { - for(var j = 0 ; j < this.input.length; ++j) { - this.output[i] += this.weights[offs + j] * this.input[j]; - } - if(this.isSigmoid) - this.output[i] = sigmoid(this.output[i]); - - offs += this.input.length; - } - - return this.output.slice(); - }; - - /** - * Function that performs the backpropagation algorithm for the current layer. - * @param {Array} error - errors from the previous layer. - * @param {Number} learningRate - Learning rate for the actual layer. - * @param {Number} momentum - The regularizarion term. - * @returns {Array} the error for the next layer. 
- */ - Layer.prototype.train = function (error, learningRate, momentum) { - var offs = 0; - var nextError = Matrix.zeros(1, this.input.length).getRow(0);//new Array(this.input.length); - - for(var i = 0; i < this.output.length; ++i) { - var delta = error[i]; - - if(this.isSigmoid) - delta *= sigmoidGradient(this.output[i]); - - for(var j = 0; j < this.input.length; ++j) { - var index = offs + j; - nextError[j] += this.weights[index] * delta; - - var deltaWeight = this.input[j] * delta * learningRate; - this.weights[index] += this.deltaWeights[index] * momentum + deltaWeight; - this.deltaWeights[index] = deltaWeight; - } - - offs += this.input.length; - } - - return nextError; - }; - /***/ } /******/ ]) diff --git a/package.json b/package.json index eec62ee..e9b0434 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ml", - "version": "0.11.2", + "version": "0.12.0", "description": "Machine learning tools", "main": "src/index.js", "scripts": {
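
Usage note (not part of the diff above): a minimal sketch of the class-based API that this release moves FeedforwardNeuralNetwork to, based on the constructor, train and predict code in the dist/ml.js hunk. The require path, the XOR-style toy data and the option values are illustrative assumptions, not something this patch defines.

    // Sketch only: the export path is an assumption -- use however the ml
    // bundle actually exposes FeedforwardNeuralNetwork in your build.
    var FeedforwardNeuralNetwork = require('ml').FNN;

    // X and Y must have the same number of rows; the constructor throws a
    // RangeError otherwise. Toy XOR-style data, purely illustrative.
    var X = [[0, 0], [0, 1], [1, 0], [1, 1]];
    var Y = [[0], [1], [1], [0]];

    var fnn = new FeedforwardNeuralNetwork(X, Y);

    // All options are optional; the defaults in the hunk above are
    // hiddenLayers = [10], iterations = 50, learningRate = 0.1, momentum = 0.1.
    fnn.train({
        hiddenLayers: [4],
        iterations: 500,
        learningRate: 0.3,
        momentum: 0.3
    });

    // predict expects rows with as many columns as the input layer and
    // returns a flat array when the output layer has a single unit.
    var predicted = fnn.predict(X);

Training picks a random row (via randomIntegerFromInterval) for each of the iterations * rows updates, so results vary slightly from run to run.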