Socket
Socket
Sign inDemoInstall

ml-fnn

Package Overview
Dependencies
5
Maintainers
7
Versions
11
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 4.0.0 to 5.0.0

FeedForwardNeuralNetwork.js

24

History.md

@@ -0,1 +1,25 @@

# [5.0.0](https://github.com/mljs/feedforward-neural-networks/compare/v4.0.0...v5.0.0) (2019-06-29)
### Bug Fixes
* test 'Big test case 2' is ignored because it will never work with the current implementation ([5a92c34](https://github.com/mljs/feedforward-neural-networks/commit/5a92c34))
### chore
* update dependencies and remove support for Node.js 6 ([bb4516d](https://github.com/mljs/feedforward-neural-networks/commit/bb4516d))
### Features
* Add the possibility to retrain a pre-trained model ([db995e4](https://github.com/mljs/feedforward-neural-networks/commit/db995e4))
### BREAKING CHANGES
* Node.js 6 is no longer supported.
<a name="4.0.0"></a>

@@ -2,0 +26,0 @@ # [4.0.0](https://github.com/mljs/feedforward-neural-networks/compare/v3.0.0...v4.0.0) (2017-07-21)

37

package.json
{
"name": "ml-fnn",
"version": "4.0.0",
"version": "5.0.0",
"description": "feedforward neural networks library",
"main": "src/FeedForwardNeuralNetwork.js",
"directories": {
"lib": "src",
"test": "test"
},
"main": "FeedForwardNeuralNetwork.js",
"files": [
"src",
"FeedForwardNeuralNetwork.js"
],
"scripts": {
"eslint": "eslint src test",
"compile": "rollup -c",
"eslint": "eslint src",
"eslint-fix": "npm run eslint -- --fix",
"test": "npm run test-mocha && npm run eslint",
"test-travis": "istanbul cover node_modules/.bin/_mocha --report lcovonly -- --require should --reporter dot --recursive",
"test-mocha": "mocha --require should --reporter mocha-better-spec-reporter"
"prepublishOnly": "npm run compile",
"test": "npm run test-coverage && npm run eslint",
"test-only": "jest",
"test-coverage": "jest --coverage"
},

@@ -35,12 +37,13 @@ "repository": {

"dependencies": {
"ml-matrix": "^5.0.0"
"ml-matrix": "^6.1.2"
},
"devDependencies": {
"eslint": "^4.1.1",
"eslint-config-cheminfo": "^1.8.0",
"eslint-plugin-no-only-tests": "^2.0.0",
"mocha": "^3.4.2",
"mocha-better-spec-reporter": "^3.1.0",
"should": "^11.2.1"
"@babel/plugin-transform-modules-commonjs": "^7.4.4",
"eslint": "^6.0.1",
"eslint-config-cheminfo": "^1.20.1",
"eslint-plugin-import": "^2.18.0",
"eslint-plugin-jest": "^22.7.1",
"jest": "^24.8.0",
"rollup": "^1.16.3"
}
}
# Feedforward Neural Network
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![David deps][david-image]][david-url]
[![npm download][download-image]][download-url]
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![npm download][download-image]][download-url]

@@ -12,3 +11,3 @@ An implementation of feedforward neural networks in javascript based on wildml [implementation](http://www.wildml.com/2015/09/implementing-a-neural-network-from-scratch/).

`$ npm install ml-fnn`
`$ npm i ml-fnn`

@@ -25,5 +24,3 @@ ## [API Documentation](https://mljs.github.io/feedforward-neural-networks/)

[travis-url]: https://travis-ci.org/mljs/feedforward-neural-networks
[david-image]: https://img.shields.io/david/mljs/feedforward-neural-networks.svg?style=flat-square
[david-url]: https://david-dm.org/mljs/feedforward-neural-networks
[download-image]: https://img.shields.io/npm/dm/ml-fnn.svg?style=flat-square
[download-url]: https://npmjs.org/package/ml-fnn

@@ -1,88 +0,86 @@

'use strict';
/**
 * Logistic (sigmoid) activation: maps any real value into (0, 1).
 * @param {number} val
 * @return {number}
 */
function logistic(val) {
  return 1 / (1 + Math.exp(-val));
}
/**
 * Exponential linear unit (ELU): identity for positive inputs,
 * `param * (e^val - 1)` for negative inputs.
 * @param {number} val
 * @param {number} param - scale of the negative branch.
 * @return {number}
 */
function expELU(val, param) {
  return val < 0 ? param * (Math.exp(val) - 1) : val;
}
/**
 * Soft exponential activation (Godfrey & Gashler). Interpolates between
 * logarithmic (param < 0), identity (param === 0) and exponential
 * (param > 0) behaviour.
 * @param {number} val
 * @param {number} param
 * @return {number}
 */
function softExponential(val, param) {
  if (param < 0) {
    return -Math.log(1 - param * (val + param)) / param;
  }
  if (param > 0) {
    return ((Math.exp(param * val) - 1) / param) + param;
  }
  // param === 0: identity
  return val;
}
/**
 * First derivative of the soft exponential activation with respect to `val`.
 * @param {number} val
 * @param {number} param
 * @return {number}
 */
function softExponentialPrime(val, param) {
  if (param < 0) {
    return 1 / (1 - param * (param + val));
  } else {
    // covers param === 0 too: exp(0) === 1, the derivative of identity
    return Math.exp(param * val);
  }
}
/**
 * Map of supported activation functions. Each entry exposes the function
 * itself (`activation`) and its first derivative (`derivate`). Parametric
 * entries ('parametric-relu', 'exponential-elu', 'soft-exponential') take a
 * second `param` argument; `Layer` detects this through the function arity.
 */
const ACTIVATION_FUNCTIONS = {
  tanh: {
    activation: Math.tanh,
    derivate: (val) => 1 - (val * val)
  },
  identity: {
    activation: (val) => val,
    derivate: () => 1
  },
  logistic: {
    activation: logistic,
    derivate: (val) => logistic(val) * (1 - logistic(val))
  },
  arctan: {
    activation: Math.atan,
    derivate: (val) => 1 / (val * val + 1)
  },
  softsign: {
    activation: (val) => val / (1 + Math.abs(val)),
    derivate: (val) => 1 / ((1 + Math.abs(val)) * (1 + Math.abs(val)))
  },
  relu: {
    activation: (val) => (val < 0 ? 0 : val),
    derivate: (val) => (val < 0 ? 0 : 1)
  },
  softplus: {
    activation: (val) => Math.log(1 + Math.exp(val)),
    derivate: (val) => 1 / (1 + Math.exp(-val))
  },
  bent: {
    activation: (val) => ((Math.sqrt(val * val + 1) - 1) / 2) + val,
    derivate: (val) => (val / (2 * Math.sqrt(val * val + 1))) + 1
  },
  sinusoid: {
    activation: Math.sin,
    derivate: Math.cos
  },
  sinc: {
    activation: (val) => (val === 0 ? 1 : Math.sin(val) / val),
    derivate: (val) => (val === 0 ? 0 : (Math.cos(val) / val) - (Math.sin(val) / (val * val)))
  },
  gaussian: {
    activation: (val) => Math.exp(-(val * val)),
    derivate: (val) => -2 * val * Math.exp(-(val * val))
  },
  'parametric-relu': {
    activation: (val, param) => (val < 0 ? param * val : val),
    derivate: (val, param) => (val < 0 ? param : 1)
  },
  'exponential-elu': {
    activation: expELU,
    derivate: (val, param) => (val < 0 ? expELU(val, param) + param : 1)
  },
  'soft-exponential': {
    activation: softExponential,
    derivate: softExponentialPrime
  }
};

export default ACTIVATION_FUNCTIONS;

@@ -1,217 +0,237 @@

import { Matrix } from 'ml-matrix';

import ACTIVATION_FUNCTIONS from './activationFunctions';
import { Layer } from './Layer';
import { OutputLayer } from './OutputLayer';

export default class FeedForwardNeuralNetworks {
  /**
   * Create a new Feedforward neural network model.
   * @class FeedForwardNeuralNetworks
   * @param {object} [options]
   * @param {Array} [options.hiddenLayers=[10]] - Array that contains the sizes of the hidden layers.
   * @param {number} [options.iterations=50] - Number of iterations at the training step.
   * @param {number} [options.learningRate=0.01] - Learning rate of the neural net (also known as epsilon).
   * @param {number} [options.regularization=0.01] - Regularization parameter of the neural net.
   * @param {string} [options.activation='tanh'] - activation function to be used. (options: 'tanh'(default),
   * 'identity', 'logistic', 'arctan', 'softsign', 'relu', 'softplus', 'bent', 'sinusoid', 'sinc', 'gaussian').
   * (single-parametric options: 'parametric-relu', 'exponential-elu', 'soft-exponential').
   * @param {number} [options.activationParam=1] - if the selected activation function needs a parameter.
   */
  constructor(options) {
    options = options || {};
    if (options.model) {
      // Rebuild the network from a previously exported model (see toJSON()).
      this.hiddenLayers = options.hiddenLayers;
      this.iterations = options.iterations;
      this.learningRate = options.learningRate;
      this.regularization = options.regularization;
      this.dicts = options.dicts;
      this.activation = options.activation;
      this.activationParam = options.activationParam;

      this.model = new Array(options.layers.length);
      for (var i = 0; i < this.model.length - 1; ++i) {
        this.model[i] = Layer.load(options.layers[i]);
      }
      this.model[this.model.length - 1] = OutputLayer.load(
        options.layers[this.model.length - 1]
      );
    } else {
      // default constructor
      this.hiddenLayers = options.hiddenLayers || [10];
      this.iterations = options.iterations || 50;
      this.learningRate = options.learningRate || 0.01;
      this.regularization = options.regularization || 0.01;
      this.activation = options.activation || 'tanh';
      this.activationParam = options.activationParam || 1;

      // Fall back to 'tanh' for unknown activation names.
      // BUGFIX: the original tested `in Object.keys(ACTIVATION_FUNCTIONS)`,
      // which checks *array indices* of the keys array and therefore always
      // reset the activation to 'tanh', silently ignoring the user's choice.
      if (!(this.activation in ACTIVATION_FUNCTIONS)) {
        this.activation = 'tanh';
      }
    }
  }

  /**
   * @private
   * Build and initialize the layers of the neural net.
   * @param {number} inputSize - total of features to fit.
   * @param {number} outputSize - total of labels of the prediction set.
   */
  buildNetwork(inputSize, outputSize) {
    // input layer + hidden layers + output layer
    var size = 2 + (this.hiddenLayers.length - 1);
    this.model = new Array(size);

    // input layer
    this.model[0] = new Layer({
      inputSize: inputSize,
      outputSize: this.hiddenLayers[0],
      activation: this.activation,
      activationParam: this.activationParam,
      regularization: this.regularization,
      epsilon: this.learningRate
    });

    // hidden layers
    for (var i = 1; i < this.hiddenLayers.length; ++i) {
      this.model[i] = new Layer({
        inputSize: this.hiddenLayers[i - 1],
        outputSize: this.hiddenLayers[i],
        activation: this.activation,
        activationParam: this.activationParam,
        regularization: this.regularization,
        epsilon: this.learningRate
      });
    }

    // output layer
    this.model[size - 1] = new OutputLayer({
      inputSize: this.hiddenLayers[this.hiddenLayers.length - 1],
      outputSize: outputSize,
      activation: this.activation,
      activationParam: this.activationParam,
      regularization: this.regularization,
      epsilon: this.learningRate
    });
  }

  /**
   * Train the neural net with the given features and labels.
   * If a model already exists (loaded or previously trained) it is reused,
   * which allows retraining a pre-trained network.
   * @param {Matrix|Array} features
   * @param {Array} labels
   */
  train(features, labels) {
    features = Matrix.checkMatrix(features);
    this.dicts = dictOutputs(labels);

    var inputSize = features.columns;
    var outputSize = Object.keys(this.dicts.inputs).length;

    if (!this.model) {
      this.buildNetwork(inputSize, outputSize);
    }

    for (var i = 0; i < this.iterations; ++i) {
      var probabilities = this.propagate(features);
      this.backpropagation(features, labels, probabilities);
    }
  }

  /**
   * @private
   * Propagate the input (training set) and retrieve the probabilities of each class.
   * @param {Matrix} X
   * @return {Matrix} probabilities of each class.
   */
  propagate(X) {
    var input = X;
    for (var i = 0; i < this.model.length; ++i) {
      input = this.model[i].forward(input);
    }

    // normalize each row to get probabilities (softmax denominator)
    return input.divColumnVector(input.sum('row'));
  }

  /**
   * @private
   * Apply the backpropagation algorithm on each layer of the network
   * in order to fit the features and labels.
   * @param {Matrix} features
   * @param {Array} labels
   * @param {Matrix} probabilities - probabilities of each class of the feature set.
   */
  backpropagation(features, labels, probabilities) {
    // gradient of cross-entropy w.r.t. the outputs: subtract 1 at the true class
    for (var i = 0; i < probabilities.rows; ++i) {
      probabilities.set(
        i,
        this.dicts.inputs[labels[i]],
        probabilities.get(i, this.dicts.inputs[labels[i]]) - 1
      );
    }

    // remember, the last delta doesn't matter
    var delta = probabilities;
    for (i = this.model.length - 1; i >= 0; --i) {
      var a = i > 0 ? this.model[i - 1].a : features;
      delta = this.model[i].backpropagation(delta, a);
    }

    for (i = 0; i < this.model.length; ++i) {
      this.model[i].update();
    }
  }

  /**
   * Predict the output given the feature set.
   * @param {Array|Matrix} features
   * @return {Array}
   */
  predict(features) {
    features = Matrix.checkMatrix(features);
    var outputs = new Array(features.rows);

    var probabilities = this.propagate(features);
    for (var i = 0; i < features.rows; ++i) {
      // maxRowIndex returns [row, column]; the column is the class index
      outputs[i] = this.dicts.outputs[probabilities.maxRowIndex(i)[1]];
    }

    return outputs;
  }

  /**
   * Export the current model to JSON.
   * @return {object} model
   */
  toJSON() {
    var model = {
      model: 'FNN',
      hiddenLayers: this.hiddenLayers,
      iterations: this.iterations,
      learningRate: this.learningRate,
      regularization: this.regularization,
      activation: this.activation,
      activationParam: this.activationParam,
      dicts: this.dicts,
      layers: new Array(this.model.length)
    };

    for (var i = 0; i < this.model.length; ++i) {
      model.layers[i] = this.model[i].toJSON();
    }

    return model;
  }

  /**
   * Load a Feedforward Neural Network from an exported model.
   * @param {object} model
   * @return {FeedForwardNeuralNetworks}
   */
  static load(model) {
    if (model.model !== 'FNN') {
      throw new RangeError('the current model is not a feed forward network');
    }

    return new FeedForwardNeuralNetworks(model);
  }
}

/**
 * @private
 * Given an array of labels (predictions), return two dictionaries: one to
 * transform from labels to numbers and the other in the reverse way.
 * @param {Array} array
 * @return {object} `{ inputs, outputs }` lookup tables.
 */
function dictOutputs(array) {
  var inputs = {};
  var outputs = {};
  var index = 0;

  for (var i = 0; i < array.length; i += 1) {
    if (inputs[array[i]] === undefined) {
      inputs[array[i]] = index;
      outputs[index] = array[i];
      index++;
    }
  }

  return {
    inputs: inputs,
    outputs: outputs
  };
}

@@ -1,10 +0,7 @@

import { Matrix } from 'ml-matrix';

import ACTIVATION_FUNCTIONS from './activationFunctions';

export class Layer {
  /**
   * @private
   * Create a new layer with the given options.
   * @param {object} options
   * @param {number} options.inputSize - number of inputs of the layer.
   * @param {number} options.outputSize - number of outputs of the layer.
   * @param {number} options.regularization - regularization parameter.
   * @param {number} options.epsilon - learning rate.
   * @param {string} options.activation - activation function name (see activationFunctions).
   * @param {number} [options.activationParam] - parameter for parametric activation functions.
   * @param {string} [options.model] - set when reloading an exported layer; then
   * `options.W` and `options.b` hold the stored weights and biases.
   */
  constructor(options) {
    this.inputSize = options.inputSize;
    this.outputSize = options.outputSize;
    this.regularization = options.regularization;
    this.epsilon = options.epsilon;
    this.activation = options.activation;
    this.activationParam = options.activationParam;

    var selectedFunction = ACTIVATION_FUNCTIONS[options.activation];
    // arity > 1 means the activation takes an extra parameter; bind it now
    var params = selectedFunction.activation.length;

    var actFunction =
      params > 1
        ? (val) => selectedFunction.activation(val, options.activationParam)
        : selectedFunction.activation;
    var derFunction =
      params > 1
        ? (val) => selectedFunction.derivate(val, options.activationParam)
        : selectedFunction.derivate;

    // Used as Matrix#apply callbacks: `this` is the matrix being visited.
    this.activationFunction = function (i, j) {
      this.set(i, j, actFunction(this.get(i, j)));
    };
    this.derivate = function (i, j) {
      this.set(i, j, derFunction(this.get(i, j)));
    };

    if (options.model) {
      // load model
      this.W = Matrix.checkMatrix(options.W);
      this.b = Matrix.checkMatrix(options.b);
    } else {
      // default constructor: random weights scaled by 1/sqrt(inputSize)
      this.W = Matrix.rand(this.inputSize, this.outputSize);
      this.b = Matrix.zeros(1, this.outputSize);

      this.W.apply(function (i, j) {
        this.set(i, j, this.get(i, j) / Math.sqrt(options.inputSize));
      });
    }
  }

  /**
   * @private
   * Propagate the given input through the current layer.
   * @param {Matrix} X - input.
   * @return {Matrix} activated output (a clone is kept in `this.a` for backprop).
   */
  forward(X) {
    var z = X.mmul(this.W).addRowVector(this.b);
    z.apply(this.activationFunction);
    this.a = z.clone();
    return z;
  }

  /**
   * @private
   * Apply the backpropagation algorithm at the current layer.
   * @param {Matrix} delta - delta coming from the next layer.
   * @param {Matrix} a - activations of the previous layer.
   * @return {Matrix} delta to propagate to the previous layer.
   */
  backpropagation(delta, a) {
    this.dW = a.transpose().mmul(delta);
    this.db = Matrix.rowVector(delta.sum('column'));

    var aCopy = a.clone();
    return delta.mmul(this.W.transpose()).mul(aCopy.apply(this.derivate));
  }

  /**
   * @private
   * Update the weights of the current layer with the computed derivatives.
   */
  update() {
    // L2 regularization term folded into the gradient
    this.dW.add(this.W.clone().mul(this.regularization));
    this.W.add(this.dW.mul(-this.epsilon));
    this.b.add(this.db.mul(-this.epsilon));
  }

  /**
   * @private
   * Export the current layer to JSON.
   * @return {object} model
   */
  toJSON() {
    return {
      model: 'Layer',
      inputSize: this.inputSize,
      outputSize: this.outputSize,
      regularization: this.regularization,
      epsilon: this.epsilon,
      activation: this.activation,
      W: this.W,
      b: this.b
    };
  }

  /**
   * @private
   * Create a new Layer from the given model.
   * @param {object} model
   * @return {Layer}
   */
  static load(model) {
    if (model.model !== 'Layer') {
      throw new RangeError('the current model is not a Layer model');
    }

    return new Layer(model);
  }
}

@@ -1,23 +0,19 @@

import { Layer } from './Layer';

/**
 * Output layer of the network: identical to Layer except that the activation
 * exponentiates each value, so that the network can normalize the outputs
 * into softmax probabilities (see FeedForwardNeuralNetworks#propagate).
 */
export class OutputLayer extends Layer {
  constructor(options) {
    super(options);

    // Softmax numerator: exponentiate each output in place.
    // Used as a Matrix#apply callback: `this` is the matrix being visited.
    this.activationFunction = function (i, j) {
      this.set(i, j, Math.exp(this.get(i, j)));
    };
  }

  /**
   * @private
   * Create a new OutputLayer from an exported layer model.
   * @param {object} model
   * @return {OutputLayer}
   */
  static load(model) {
    if (model.model !== 'Layer') {
      throw new RangeError('the current model is not a Layer model');
    }

    return new OutputLayer(model);
  }
}
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc