summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTinWoodman92 <chrhodgden@gmail.com>2024-02-08 16:09:56 -0600
committerTinWoodman92 <chrhodgden@gmail.com>2024-02-08 16:09:56 -0600
commit62993dd59bbbfcdb07dbb5836d359fad6334f23e (patch)
tree4f0d310442c66050d0886eb82bebd9475e63803c
initial commit
-rw-r--r--.gitignore2
-rw-r--r--doc/nnetwork.svg119
-rw-r--r--doc/readme.md10
-rw-r--r--src/activationFunctions.js54
-rw-r--r--src/binaryConverter.js56
-rw-r--r--src/demo.js18
-rw-r--r--src/layer.js53
-rw-r--r--src/nnetwork.js36
-rw-r--r--test/unit_tests/activationFunctions.test.js23
-rw-r--r--test/unit_tests/binaryConverter.test.js33
-rw-r--r--test/unit_tests/layer.test.js15
-rw-r--r--test/unit_tests/nnetwork.test.js17
12 files changed, 436 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0976e26
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+_*
+node_modules
diff --git a/doc/nnetwork.svg b/doc/nnetwork.svg
new file mode 100644
index 0000000..7cd35f4
--- /dev/null
+++ b/doc/nnetwork.svg
@@ -0,0 +1,119 @@
<svg id='nnetwork' width='100' height='100' viewBox='0 0 100 100' xmlns='http://www.w3.org/2000/svg'>

    <style>
        :root {
            --dark-stroke: 1;
            --stroke-lightness: calc(100% - calc(var(--dark-stroke) * 100%));
            --stroke-color: hsl(0, 0%, 50%);
            --fill-color: hsl(0, 0%, 100%);
            --n1-color: hsl(210, 100%, 50%);
            --n2-color: hsl(300, 100%, 50%);
            --stroke-width: 0.5;
            --stroke-width-n: 5;
        }

        .node {
            fill: var(--fill-color);
            stroke: var(--stroke-color);
            stroke-width: 3;
        }

        .weight {
            stroke: var(--stroke-color);
            stroke-width: var(--stroke-width);
            fill-opacity: 0;
        }

        /* Highlight classes: ids must be unique in SVG, so the repeated
           #node-n1 / #weight-n1 style ids are replaced with classes.
           These rules come after .node/.weight so they win at equal
           specificity. */
        .node-n1 {
            fill: var(--n1-color);
            stroke: var(--n1-color);
        }

        .node-n2 {
            fill: var(--n2-color);
            stroke: var(--n2-color);
        }

        .weight-n1 {
            stroke: var(--n1-color);
            stroke-width: var(--stroke-width-n);
        }

        .weight-n2 {
            stroke: var(--n2-color);
            stroke-width: var(--stroke-width-n);
        }
    </style>

    <!--Weights Layer 1-to-2 Node 1-->
    <path class='weight' d='M10 10, 37 10'/>
    <path class='weight' d='M10 50, 37 10'/>
    <path class='weight' d='M10 90, 37 10'/>

    <!--Weights Layer 1-to-2 Node 2-->
    <path class='weight weight-n1' d='M10 10, 37 50'/>
    <path class='weight' d='M10 50, 37 50'/>
    <path class='weight' d='M10 90, 37 50'/>

    <!--Weights Layer 1-to-2 Node 3-->
    <path class='weight' d='M10 10, 37 90'/>
    <path class='weight' d='M10 50, 37 90'/>
    <path class='weight' d='M10 90, 37 90'/>

    <!--Weights Layer 2-to-3 Node 1-->
    <path class='weight' d='M37 10, 64 10'/>
    <path class='weight' d='M37 50, 64 10'/>
    <path class='weight' d='M37 90, 64 10'/>

    <!--Weights Layer 2-to-3 Node 2-->
    <path class='weight weight-n2' d='M37 10, 64 50'/>
    <path class='weight' d='M37 50, 64 50'/>
    <path class='weight' d='M37 90, 64 50'/>

    <!--Weights Layer 2-to-3 Node 3-->
    <path class='weight' d='M37 10, 64 90'/>
    <path class='weight weight-n1' d='M37 50, 64 90'/>
    <path class='weight' d='M37 90, 64 90'/>

    <!--Weights Layer 3-to-4 Node 1-->
    <path class='weight' d='M64 10, 90 10'/>
    <path class='weight' d='M64 50, 90 10'/>
    <path class='weight' d='M64 90, 90 10'/>

    <!--Weights Layer 3-to-4 Node 2-->
    <path class='weight' d='M64 10, 90 50'/>
    <path class='weight' d='M64 50, 90 50'/>
    <path class='weight' d='M64 90, 90 50'/>

    <!--Weights Layer 3-to-4 Node 3-->
    <path class='weight' d='M64 10, 90 90'/>
    <path class='weight weight-n2' d='M64 50, 90 90'/>
    <path class='weight' d='M64 90, 90 90'/>

    <!--Layer Lines-->
    <path class='weight weight-n1' d='M10 10, 10 90'/>
    <path class='weight weight-n2' d='M37 10, 37 90'/>
    <path class='weight weight-n1' d='M64 10, 64 90'/>
    <path class='weight weight-n2' d='M90 10, 90 90'/>

    <!--Nodes Layer 1-->
    <circle class='node node-n1' cx='10' cy='10' r='7.5'/>
    <circle class='node node-n1' cx='10' cy='50' r='7.5'/>
    <circle class='node node-n1' cx='10' cy='90' r='7.5'/>

    <!--Nodes Layer 2-->
    <circle class='node node-n2' cx='37' cy='10' r='7.5'/>
    <circle class='node node-n1' cx='37' cy='50' r='7.5'/>
    <circle class='node node-n2' cx='37' cy='90' r='7.5'/>

    <!--Nodes Layer 3-->
    <circle class='node node-n1' cx='64' cy='10' r='7.5'/>
    <circle class='node node-n2' cx='64' cy='50' r='7.5'/>
    <circle class='node node-n1' cx='64' cy='90' r='7.5'/>

    <!--Nodes Layer 4-->
    <circle class='node node-n2' cx='90' cy='10' r='7.5'/>
    <circle class='node node-n2' cx='90' cy='50' r='7.5'/>
    <circle class='node node-n2' cx='90' cy='90' r='7.5'/>

</svg>
diff --git a/doc/readme.md b/doc/readme.md
new file mode 100644
index 0000000..709fee1
--- /dev/null
+++ b/doc/readme.md
@@ -0,0 +1,10 @@
+![NNetwork Icon](nnetwork.svg)
+
+# NNetwork.js
+
+## A simple neural network as a Javascript Class Object
+
+Built on the math.js library, this package is designed to run in a web browser.
+
+This package is currently under development.
+
diff --git a/src/activationFunctions.js b/src/activationFunctions.js
new file mode 100644
index 0000000..6ccc046
--- /dev/null
+++ b/src/activationFunctions.js
@@ -0,0 +1,54 @@
console.log('Hello from activationFunctions.js');

// The sigmoid is defined symbolically so math.js can differentiate it;
// both the function and its derivative are compiled once for fast
// repeated evaluation.
let sigmoidExpression = '1 / (1 + exp(-x))';
let dSigmoidExpression = math.derivative(sigmoidExpression, 'x');

sigmoidExpression = math.compile(sigmoidExpression);
dSigmoidExpression = math.compile(dSigmoidExpression.toString());

// sigmoid(x) = 1 / (1 + e^-x), evaluated via the compiled expression.
const sigmoid = function(xValue) {
    return sigmoidExpression.evaluate({x: xValue});
};

// First derivative of the sigmoid with respect to x.
const dSigmoid_dx = function(xValue) {
    return dSigmoidExpression.evaluate({x: xValue});
};

// relu(x) = max(0, x). Math.max avoids the negative zero (-0) that the
// previous `x * (x > 0)` form produced for negative inputs.
const relu = function(x) {
    return Math.max(0, x);
};

// Derivative of relu: 1 for x > 0, otherwise 0 (including at x = 0).
const dRelu_dx = function(x) {
    return Number(x > 0);
};

// identity(x) = x, useful for pass-through (e.g. input/output) layers.
const identity = function(x) {
    return x;
};

// Derivative of the identity function is the constant 1.
const dIdentity_dx = function(x) {
    return 1;
};

// Lift an element-wise scalar function to operate on a math.js matrix.
const matrixMethod = function(elementMethod) {
    return function(matrix) {
        return math.map(matrix, (element) => elementMethod(element));
    };
};

// Registry mapping an activation-function name to the matrix-level
// function (gx) and its derivative (dg_dx). Declared with const instead
// of the previous implicit-global assignment; a top-level const in a
// classic script is still visible to the other scripts on the page.
const activationFunctionList = {
    'relu': {
        gx: matrixMethod(relu),
        dg_dx: matrixMethod(dRelu_dx)
    },
    'sigmoid': {
        gx: matrixMethod(sigmoid),
        dg_dx: matrixMethod(dSigmoid_dx)
    },
    'identity': {
        gx: matrixMethod(identity),
        dg_dx: matrixMethod(dIdentity_dx)
    }
};
diff --git a/src/binaryConverter.js b/src/binaryConverter.js
new file mode 100644
index 0000000..33cbb9c
--- /dev/null
+++ b/src/binaryConverter.js
@@ -0,0 +1,56 @@
const math = require('mathjs');

/**
 * Converts between an integer, its fixed-width binary input vector, and a
 * one-hot output vector. Used as a training-data generator: the binary
 * digits feed the network's input layer and the one-hot vector is the
 * target output.
 */
class BinaryConverter {
    /**
     * @param {number} inputDigits - width of the binary input vector.
     */
    constructor(inputDigits) {
        this._inputDigits = inputDigits;
        // Largest representable input (all ones) plus one gives the
        // number of one-hot output slots.
        let maxActivation = math.ones(this._inputDigits);
        maxActivation = this.convertBinary(maxActivation);
        maxActivation++;
        this._outputDigits = maxActivation;
        this._inputActivation = math.zeros(this._inputDigits);
        this._outputActivation = math.zeros(this._outputDigits);
        this._integer = 0;
        // Use the public Matrix API instead of poking the private _data.
        this._outputActivation.set([this._integer], 1);
    }
    /**
     * Interpret a 0/1 vector as big-endian binary and return the integer.
     */
    convertBinary(binaryVector) {
        const digits = binaryVector.toArray().join('');
        return parseInt(digits, 2);
    }
    /**
     * Convert an integer to a zero-padded binary vector of _inputDigits.
     */
    convertInteger(inputInteger) {
        let b = inputInteger.toString(2);
        b = b.padStart(this._inputDigits, '0');
        return math.matrix(b.split('').map(Number));
    }
    /**
     * Set the input activation to a random 0/1 vector and return it.
     */
    randomInput() {
        let randomBinaryVector = math.matrix(math.random([this._inputDigits]));
        randomBinaryVector = math.map(randomBinaryVector, (element) => math.floor(element * 2));
        this.inputActivation = randomBinaryVector;
        return randomBinaryVector;
    }
    // Setting the input vector keeps _integer and the one-hot output in sync.
    set inputActivation(inputVector) {
        this._inputActivation = inputVector;
        this._integer = this.convertBinary(inputVector);
        this._outputActivation = math.zeros(this._outputDigits);
        this._outputActivation.set([this._integer], 1);
    }
    get inputActivation() {
        return this._inputActivation;
    }
    get outputActivation() {
        return this._outputActivation;
    }
    // Setting the integer keeps the input vector and one-hot output in sync.
    set integer(inputInteger) {
        this._integer = inputInteger;
        this._inputActivation = this.convertInteger(inputInteger);
        this._outputActivation = math.zeros(this._outputDigits);
        this._outputActivation.set([this._integer], 1);
    }
    get integer() {
        return this._integer;
    }
};

// Previously missing: test/unit_tests/binaryConverter.test.js requires this
// file and received an empty object without an export.
module.exports = BinaryConverter;
diff --git a/src/demo.js b/src/demo.js
new file mode 100644
index 0000000..75fa61a
--- /dev/null
+++ b/src/demo.js
@@ -0,0 +1,18 @@
console.log('Hello from demo.js');

// Two sample rows used to exercise basic math.js matrix operations.
const rowA = [1, 2, 3, 4];
const rowB = [5, 6, 7, 8];

// A 2x4 matrix built from the raw arrays (constructed, as in the original
// demo, before the rows are converted individually).
const combined = math.matrix([rowA, rowB]);

// Convert each row to a math.js matrix, transpose it, then join the two
// along dimension 0.
const vecA = math.transpose(math.matrix(rowA));
const vecB = math.transpose(math.matrix(rowB));

const joined = math.concat(vecA, vecB, 0);

console.log(joined);
diff --git a/src/layer.js b/src/layer.js
new file mode 100644
index 0000000..65b14ec
--- /dev/null
+++ b/src/layer.js
@@ -0,0 +1,53 @@
console.log('Hello from layer.js');

/**
 * A single fully-connected neural-network layer.
 *
 * Weights form an inputCount x nodeCount matrix initialised uniformly in
 * (-1, 1); biases start at zero. Gradients from each backward pass are
 * collected and averaged when gradientDescent() is applied.
 */
class Layer {
    constructor(inputCount, nodeCount, activationFunctionName) {
        // Activation function g(x) and its derivative, applied element-wise
        // (looked up from the global activationFunctionList registry).
        this.gx = activationFunctionList[activationFunctionName].gx;
        this.dg_dx = activationFunctionList[activationFunctionName].dg_dx;
        this.weights = math.matrix(math.random([inputCount, nodeCount]));
        this.weights = math.map(this.weights, (element) => (element - 0.5) * 2);
        this.biases = math.matrix(math.zeros([nodeCount]));
        this.z = math.matrix(math.zeros([nodeCount]));
        // Per-sample gradients accumulated between gradient-descent steps.
        // Plain arrays replace the zero-seeded matrices the previous version
        // used, which wrongly included an all-zero entry in the average.
        this.dc_dw_vectors = [];
        this.dc_db_vectors = [];
    }
    /**
     * Forward pass: a_out = g(a_in . W + b). Caches a_in and z for backprop.
     */
    forwardPropogation(activationInput) {
        this.ai = activationInput;
        let z = math.multiply(activationInput, this.weights);
        z = math.add(z, this.biases);
        this.z = z;
        return this.gx(z);
    }
    /**
     * Backward pass: given dC/da_out, store this layer's gradients and
     * return dC/da_in for the previous layer.
     */
    backPropogation(dc_dao) {
        // dC/dz = g'(z) .* dC/da_out; this also equals dC/db since dz/db = 1.
        const dao_dz = this.dg_dx(this.z);
        const dc_db = math.dotMultiply(dao_dz, dc_dao);
        // dC/dW is the outer product a_in^T . dC/dz, matching the
        // inputCount x nodeCount shape of the weight matrix (the previous
        // version collapsed this to a nodeCount-length vector).
        const dc_dw = math.matrix(this.ai.toArray().map(
            (a) => math.multiply(dc_db, a).toArray()
        ));
        // dC/da_in = W . dC/dz, propagated to the previous layer.
        const dc_dai = math.multiply(this.weights, dc_db);

        // Store cost gradients for the next gradient-descent step.
        this.dc_db_vectors.push(dc_db);
        this.dc_dw_vectors.push(dc_dw);

        return dc_dai;
    }
    /**
     * Apply one averaged gradient-descent step and clear the accumulators.
     */
    gradientDescent(learningRate) {
        // Nothing to do if no backward pass happened since the last step.
        if (this.dc_db_vectors.length === 0) return;

        const count = this.dc_db_vectors.length;
        const sum = (vectors) => vectors.reduce((acc, v) => math.add(acc, v));
        const dc_dw_avg = math.divide(sum(this.dc_dw_vectors), count);
        const dc_db_avg = math.divide(sum(this.dc_db_vectors), count);

        // math.subtract/math.multiply replace the previous plain `-`/`*`,
        // which coerce math.js matrices to NaN; the bias update also
        // previously subtracted the WEIGHT gradient by mistake.
        this.weights = math.subtract(this.weights, math.multiply(learningRate, dc_dw_avg));
        this.biases = math.subtract(this.biases, math.multiply(learningRate, dc_db_avg));

        this.dc_dw_vectors = [];
        this.dc_db_vectors = [];
    }
};
diff --git a/src/nnetwork.js b/src/nnetwork.js
new file mode 100644
index 0000000..dd21491
--- /dev/null
+++ b/src/nnetwork.js
@@ -0,0 +1,36 @@
console.log('Hello from nnetwork.js');

/**
 * A simple feed-forward neural network composed of Layer objects.
 *
 * nodeCounts[0] is the input width; each later entry creates a layer whose
 * activation function is named at the same index of activationFunctionNames
 * (so activationFunctionNames[0] is unused).
 */
class NNetwork {
    constructor(nodeCounts, activationFunctionNames, learningRate) {
        this.learningRate = learningRate;
        this.layers = [];
        for (let i = 1; i < nodeCounts.length; i++) {
            this.layers.push(new Layer(nodeCounts[i - 1], nodeCounts[i], activationFunctionNames[i]));
        }
    }
    /**
     * Run the input through every layer in order; returns the final output.
     */
    forwardPropogation(activationInput) {
        let ao = activationInput;
        this.layers.forEach((layer) => {
            ao = layer.forwardPropogation(ao);
        });
        return ao;
    }
    /**
     * Forward pass then backward pass; returns the scalar squared-error cost.
     * Each layer stores its own gradients for a later gradientDescent().
     */
    backPropogation(activationInput, targetOutput) {
        const ao = this.forwardPropogation(activationInput);
        let cost = math.subtract(ao, targetOutput);
        // d(cost)/da = 2 * (a - target) for the squared-error cost.
        let dc_da = math.multiply(cost, 2);
        cost = math.sum(math.map(cost, (element) => element ** 2));
        // Walk the layers last-to-first WITHOUT mutating this.layers; the
        // previous reverse()/re-reverse() left the array permanently
        // reversed if a layer threw mid-loop.
        for (let i = this.layers.length - 1; i >= 0; i--) {
            dc_da = this.layers[i].backPropogation(dc_da);
        }
        return cost;
    }
    /**
     * Apply each layer's accumulated gradients at this.learningRate.
     */
    gradientDescent() {
        // Iterate without mutating this.layers (order does not affect the
        // update itself).
        for (let i = this.layers.length - 1; i >= 0; i--) {
            this.layers[i].gradientDescent(this.learningRate);
        }
    }
};
diff --git a/test/unit_tests/activationFunctions.test.js b/test/unit_tests/activationFunctions.test.js
new file mode 100644
index 0000000..b7fd570
--- /dev/null
+++ b/test/unit_tests/activationFunctions.test.js
@@ -0,0 +1,23 @@
console.log('Hello from activationFunctions.test.js');

console.log(activationFunctionList);

// Sample inputs: a 1-D vector of negatives, zero, and positives, plus a
// two-row matrix built from it.
const sampleVector = math.matrix([-2, -1, 0, 1, 2]);
const sampleMatrix = math.matrix([sampleVector, sampleVector]);

// Exercise each activation function and its derivative, logging results
// in the same order as before: sigmoid on the vector, then relu and
// identity on the matrix.
const cases = [
    ['sigmoid', sampleVector],
    ['relu', sampleMatrix],
    ['identity', sampleMatrix]
];

for (const [name, input] of cases) {
    console.log(activationFunctionList[name].gx(input));
    console.log(activationFunctionList[name].dg_dx(input));
}
diff --git a/test/unit_tests/binaryConverter.test.js b/test/unit_tests/binaryConverter.test.js
new file mode 100644
index 0000000..b6fb503
--- /dev/null
+++ b/test/unit_tests/binaryConverter.test.js
@@ -0,0 +1,33 @@
const BinaryConverter = require('../../src/binaryConverter');
const math = require('mathjs');

describe('Test BinaryConversion module', () => {

    // Integer 2 corresponds to the two-digit binary vector [1, 0].
    const testInteger = 2;
    let testConverter;
    let testVector;

    beforeEach(() => {
        testConverter = new BinaryConverter(2);
        testVector = math.matrix([1, 0]);
    });

    test('convert integer to binary array', () => {
        testConverter.integer = testInteger;
        expect(testConverter.inputActivation).toEqual(testVector);
    });

    test('convert binary array to integer', () => {
        testConverter.inputActivation = testVector;
        expect(testConverter.integer).toEqual(testInteger);
    });

    test.todo('Random array initializes correct integer');

});
+
+
diff --git a/test/unit_tests/layer.test.js b/test/unit_tests/layer.test.js
new file mode 100644
index 0000000..37402d3
--- /dev/null
+++ b/test/unit_tests/layer.test.js
@@ -0,0 +1,15 @@
console.log('Hello from layer.test.js');

// Smoke-test a 2-input, 3-node layer with each activation function,
// feeding it a random binary input vector from a BinaryConverter.
let layerUnderTest = new Layer(2, 3, 'relu');
const converter = new BinaryConverter(2);

console.log(layerUnderTest);
console.log(converter.randomInput());

console.log(layerUnderTest.forwardPropogation(converter.inputActivation));

// Repeat the forward pass with the remaining activation functions.
for (const activationName of ['sigmoid', 'identity']) {
    layerUnderTest = new Layer(2, 3, activationName);
    console.log(layerUnderTest.forwardPropogation(converter.inputActivation));
}
diff --git a/test/unit_tests/nnetwork.test.js b/test/unit_tests/nnetwork.test.js
new file mode 100644
index 0000000..efc3f6e
--- /dev/null
+++ b/test/unit_tests/nnetwork.test.js
@@ -0,0 +1,17 @@
console.log('Hello from nnetwork.test.js');

// Size the network from the converter: binary digits in, one-hot out,
// with a 3-node hidden layer.
const converter = new BinaryConverter(2);
const layerSizes = [converter._inputDigits, 3, converter._outputDigits];
const activations = ['identity', 'relu', 'identity'];

const network = new NNetwork(layerSizes, activations, 0.1);

// Run two random samples through forward and backward propagation,
// logging the input, the prediction, and the cost for each.
for (let sample = 0; sample < 2; sample++) {
    console.log(converter.randomInput());
    console.log(network.forwardPropogation(converter.inputActivation));
    console.log(network.backPropogation(converter.inputActivation, converter.outputActivation));
}

network.gradientDescent();