Diffstat (limited to 'src')
-rw-r--r-- | src/activationFunctions.js | 54
-rw-r--r-- | src/binaryConverter.js     | 56
-rw-r--r-- | src/demo.js                | 18
-rw-r--r-- | src/layer.js               | 53
-rw-r--r-- | src/nnetwork.js            | 36
5 files changed, 217 insertions, 0 deletions
diff --git a/src/activationFunctions.js b/src/activationFunctions.js
new file mode 100644
index 0000000..6ccc046
--- /dev/null
+++ b/src/activationFunctions.js
@@ -0,0 +1,54 @@
+console.log('Hello from activationFunctions.js');
+
+let sigmoidExpression = '1 / (1 + exp(-x))';
+let dSigmoidExpression = math.derivative(sigmoidExpression, 'x');
+
+sigmoidExpression = math.compile(sigmoidExpression);
+dSigmoidExpression = dSigmoidExpression.toString();
+dSigmoidExpression = math.compile(dSigmoidExpression);
+
+const sigmoid = function(xValue) {
+    return sigmoidExpression.evaluate({x: xValue});
+};
+
+const dSigmoid_dx = function(xValue) {
+    return dSigmoidExpression.evaluate({x: xValue});
+};
+
+const relu = function(x) {
+    return x * (x > 0);
+};
+
+const dRelu_dx = function(x) {
+    return Number(x > 0);
+};
+
+const identity = function(x) {
+    return x;
+};
+
+const dIdentity_dx = function(x) {
+    return 1;
+};
+
+const matrixMethod = function(elementMethod) {
+    const method = function(matrix) {
+        return math.map(matrix, (element) => elementMethod(element));
+    };
+    return method;
+};
+
+activationFunctionList = {
+    'relu': {
+        gx: matrixMethod(relu),
+        dg_dx: matrixMethod(dRelu_dx)
+    },
+    'sigmoid': {
+        gx: matrixMethod(sigmoid),
+        dg_dx: matrixMethod(dSigmoid_dx)
+    },
+    'identity': {
+        gx: matrixMethod(identity),
+        dg_dx: matrixMethod(dIdentity_dx)
+    }
+};
\ No newline at end of file
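Not part of the commit above: a minimal usage sketch of activationFunctionList, assuming mathjs is available as a global math object (the file never requires it, so it presumably relies on a browser-side mathjs bundle).

    // hypothetical usage, not in the commit
    const a = math.matrix([-1, 0, 2]);
    const reluFns = activationFunctionList['relu'];
    console.log(reluFns.gx(a));     // element-wise ReLU applied over the matrix via math.map
    console.log(reluFns.dg_dx(a));  // element-wise ReLU derivative, 0 or 1 per element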
diff --git a/src/binaryConverter.js b/src/binaryConverter.js
new file mode 100644
index 0000000..33cbb9c
--- /dev/null
+++ b/src/binaryConverter.js
@@ -0,0 +1,56 @@
+const math = require('mathjs');
+
+class BinaryConverter {
+    constructor(inputDigits) {
+        this._inputDigits = inputDigits;
+        let maxActivation = math.ones(this._inputDigits);
+        maxActivation = this.convertBinary(maxActivation);
+        maxActivation++;
+        this._outputDigits = maxActivation;
+        this._inputActivation = math.zeros(this._inputDigits);
+        this._outputActivation = math.zeros(this._outputDigits);
+        this._integer = 0;
+        this._outputActivation._data[this._integer] = 1;
+    }
+    convertBinary(binaryVector) {
+        let i = binaryVector._data;
+        i = i.join('');
+        i = parseInt(i, 2)
+        return i;
+    }
+    convertInteger(inputInteger) {
+        let b = inputInteger.toString(2);
+        b = b.padStart(this._inputDigits, '0');
+        b = b.split('');
+        b = b.map(Number);
+        b = math.matrix(b);
+        return b;
+    }
+    randomInput() {
+        let randomBinaryVector = math.matrix(math.random([this._inputDigits]));
+        randomBinaryVector = math.map(randomBinaryVector, (element) => math.floor(element * 2));
+        this.inputActivation = randomBinaryVector;
+        return randomBinaryVector;
+    }
+    set inputActivation(inputVector) {
+        this._inputActivation = inputVector;
+        this._integer = this.convertBinary(inputVector);
+        this._outputActivation = math.zeros(this._outputDigits);
+        this._outputActivation._data[this._integer] = 1;
+    }
+    get inputActivation() {
+        return this._inputActivation;
+    }
+    get outputActivation() {
+        return this._outputActivation;
+    }
+    set integer(inputInteger) {
+        this._integer = inputInteger;
+        this._inputActivation = this.convertInteger(inputInteger);
+        this._outputActivation = math.zeros(this._outputDigits);
+        this._outputActivation._data[this._integer] = 1;
+    }
+    get integer() {
+        return this._integer;
+    }
+};
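Not part of the commit above: a rough sketch of how BinaryConverter appears intended to be used, pairing a binary input vector with a one-hot target vector (note that the class reaches into mathjs matrix internals via _data).

    // hypothetical usage, not in the commit
    const conv = new BinaryConverter(3);  // 3 input bits -> 8 possible integers
    conv.integer = 5;                     // input becomes the matrix [1, 0, 1]
    console.log(conv.inputActivation);    // binary encoding of 5
    console.log(conv.outputActivation);   // one-hot vector of length 8, 1 at index 5
    conv.randomInput();                   // or draw a random binary input instead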
diff --git a/src/demo.js b/src/demo.js
new file mode 100644
index 0000000..75fa61a
--- /dev/null
+++ b/src/demo.js
@@ -0,0 +1,18 @@
+console.log('Hello from demo.js');
+
+let v0 = [];
+let v1 = [1, 2, 3, 4];
+let v2 = [5, 6, 7, 8];
+
+v0 = [v1, v2]
+
+v0 = math.matrix(v0);
+v1 = math.matrix(v1);
+v2 = math.matrix(v2);
+
+v1 = math.transpose(v1);
+v2 = math.transpose(v2);
+
+let v3 = math.concat(v1, v2, 0);
+
+console.log(v3);
diff --git a/src/layer.js b/src/layer.js
new file mode 100644
index 0000000..65b14ec
--- /dev/null
+++ b/src/layer.js
@@ -0,0 +1,53 @@
+console.log('Hello from layer.js');
+
+class Layer {
+    constructor(inputCount, nodeCount, activationFunctionName) {
+        this.gx = activationFunctionList[activationFunctionName].gx;
+        this.dg_dx = activationFunctionList[activationFunctionName].dg_dx;
+        this.weights = math.matrix(math.random([inputCount, nodeCount]));
+        this.weights = math.map(this.weights, (element) => (element - 0.5) * 2);
+        this.biases = math.matrix(math.zeros([nodeCount]));
+        this.z = math.matrix(math.zeros([nodeCount]));
+        this.dc_dw_vectors = math.matrix(math.zeros(this.weights._size));
+        this.dc_db_vectors = math.matrix(math.zeros(this.biases._size));
+        console.log('dc_db_vectors', this.dc_db_vectors);
+    }
+    forwardPropogation(activationInput) {
+        this.ai = activationInput;
+        let z = math.multiply(activationInput, this.weights);
+        z = math.add(z, this.biases);
+        this.z = z;
+        let ao = this.gx(z);
+        return ao;
+    }
+    backPropogation(dc_dao) {
+        let dao_dz = this.dg_dx(this.z);
+        let dc_db = math.dotMultiply(dao_dz, dc_dao);
+        let dz_dw = math.ones(this.weights._size);
+        dz_dw = math.multiply(this.ai, dz_dw);
+        let dc_dw = math.dotMultiply(dc_db, dz_dw);
+        let dz_dai = this.weights;
+        let dc_dai = math.multiply(dz_dai, dc_db);
+
+        //store cost vectors for gradient descent
+        console.log('dc_db_vectors', this.dc_db_vectors);
+        console.log('dc_db', dc_db);
+        this.dc_db_vectors = math.concat(this.dc_db_vectors, dc_db, 0);
+        this.dc_dw_vectors = math.concat(this.dc_dw_vectors, dc_dw, 0);
+        console.log('dc_db_vectors', this.dc_db_vectors);
+
+        return dc_dai;
+    }
+    gradientDescent(learningRate) {
+
+        console.log(this.dc_dw_vectors);
+        console.log(this.dc_db_vectors);
+
+        let dc_dw_avg = math.mean(this.dc_dw_vectors, 0)
+        let dc_db_avg = math.mean(this.dc_db_vectors, 0)
+        this.weights = this.weights - (learningRate * dc_dw_avg);
+        this.biases = this.biases - (learningRate * dc_dw_avg);
+        this.dc_dw_vectors = math.matrix([]);
+        this.dc_db_vectors = math.matrix([]);
+    }
+};
diff --git a/src/nnetwork.js b/src/nnetwork.js
new file mode 100644
index 0000000..dd21491
--- /dev/null
+++ b/src/nnetwork.js
@@ -0,0 +1,36 @@
+console.log('Hello from nnetwork.js');
+
+class NNetwork {
+    constructor(nodeCounts, activationFunctionNames, learningRate) {
+        this.learningRate = learningRate;
+        this.layers = [];
+        for (let i = 1; i < nodeCounts.length; i++) {
+            this.layers.push(new Layer(nodeCounts[i-1], nodeCounts[i], activationFunctionNames[i]));
+        };
+    }
+    forwardPropogation(activationInput) {
+        let ao = activationInput;
+        this.layers.forEach(layer => {
+            ao = layer.forwardPropogation(ao);
+        });
+        return ao;
+    }
+    backPropogation(activationInput, targetOutput) {
+        let ao = this.forwardPropogation(activationInput);
+        let cost = math.subtract(ao, targetOutput);
+        let dc_da = math.multiply(cost, 2);
+        cost = math.map(cost, element => element ** 2);
+        cost = math.sum(cost)
+        this.layers.reverse().forEach(layer => {
+            dc_da = layer.backPropogation(dc_da);
+        });
+        this.layers.reverse()
+        return cost;
+    }
+    gradientDescent() {
+        this.layers.reverse().forEach(layer => {
+            layer.gradientDescent(this.learningRate);
+        });
+        this.layers.reverse()
+    }
+};
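Not part of the commit above: a hypothetical sketch of how the pieces are presumably meant to fit together, training the network to map a binary input vector to a one-hot integer output. It assumes all four source files are loaded into one scope (e.g. via script tags, since only binaryConverter.js requires mathjs itself) and only illustrates the intended call sequence, not a verified training run.

    // hypothetical usage, not in the commit
    const converter = new BinaryConverter(4);  // 4-bit inputs, 16 one-hot outputs
    const net = new NNetwork([4, 8, 16], [null, 'relu', 'sigmoid'], 0.1);  // index 0 of the names is never read
    for (let step = 0; step < 1000; step++) {
        converter.randomInput();               // draw a random 4-bit input vector
        const cost = net.backPropogation(converter.inputActivation, converter.outputActivation);
        net.gradientDescent();                 // apply the accumulated gradients
    }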