diff --git a/Esiur.Analysis.Test/Program.cs b/Esiur.Analysis.Test/Program.cs
index 39f078b..bb05509 100644
--- a/Esiur.Analysis.Test/Program.cs
+++ b/Esiur.Analysis.Test/Program.cs
@@ -39,6 +39,9 @@ namespace Esiur.Analysis.Test
         [STAThread]
         static void Main()
         {
+
+            var f = Esiur.Analysis.Algebra.Functions.Sigmoid;
+
             var signalA = new double[] { V, 1, V, 1, V, V, V };
             var signalB = new double[] { V, V, 1, V, V, 1, V };
             var cor = signalA.CrossCorrelation(signalB, true);
diff --git a/Esiur.Analysis/Algebra/Functions.cs b/Esiur.Analysis/Algebra/Functions.cs
new file mode 100644
index 0000000..e8f1cfa
--- /dev/null
+++ b/Esiur.Analysis/Algebra/Functions.cs
@@ -0,0 +1,42 @@
+using Esiur.Analysis.Neural;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Esiur.Analysis.Algebra
+{
+    public static class Functions
+    {
+        // Note: the derivatives below are expressed in terms of the function's
+        // output y = f(x), e.g. sigmoid'(x) = y * (1 - y), as is conventional
+        // for backpropagation where the output is already available.
+        private static MathFunction<RealFunction> SigmoidDerivative
+            = new MathFunction<RealFunction>(new RealFunction(x => x * (1 - x)), null, null);
+
+        public static MathFunction<RealFunction> Sigmoid = new MathFunction<RealFunction>(new RealFunction(x =>
+        {
+            double k = Math.Exp(x);
+            return k / (1.0 + k);
+        }), SigmoidDerivative, null);
+
+        private static MathFunction<RealFunction> TanhDerivative = new MathFunction<RealFunction>(
+            new RealFunction(x => 1 - (x * x)), null, null);
+
+        public static MathFunction<RealFunction> Tanh = new MathFunction<RealFunction>(
+            new RealFunction(x => Math.Tanh(x)), TanhDerivative, null);
+
+        private static MathFunction<RealFunction> ReLUDerivative = new MathFunction<RealFunction>(
+            new RealFunction(x => (x <= 0) ? 0 : 1), null, null);
+
+        public static MathFunction<RealFunction> ReLU = new MathFunction<RealFunction>(
+            new RealFunction(x => (x <= 0) ? 0 : x), ReLUDerivative, null);
+
+        private static MathFunction<RealFunction> LeakyReLUDerivative = new MathFunction<RealFunction>(
+            new RealFunction(x => (x <= 0) ? 0.01 : 1), null, null);
+
+        public static MathFunction<RealFunction> LeakyReLU = new MathFunction<RealFunction>(
+            new RealFunction(x => (x <= 0) ?
+                0.01 * x : x), LeakyReLUDerivative, null);
+    }
+}
\ No newline at end of file
diff --git a/Esiur.Analysis/Algebra/MathFunction.cs b/Esiur.Analysis/Algebra/MathFunction.cs
new file mode 100644
index 0000000..f1113f6
--- /dev/null
+++ b/Esiur.Analysis/Algebra/MathFunction.cs
@@ -0,0 +1,29 @@
+using Esiur.Analysis.Neural;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Esiur.Analysis.Algebra
+{
+    public delegate double RealFunction(double x);
+    public delegate double Real2Function(double x, double y);
+    public delegate double Real3Function(double x, double y, double z);
+
+    // A function bundled with its (optional) derivative and integral.
+    public class MathFunction<T>
+    {
+        public T Function { get; internal set; }
+        public MathFunction<T> Derivative { get; internal set; }
+        public MathFunction<T> Integral { get; internal set; }
+
+        public MathFunction(T function, MathFunction<T> derivative, MathFunction<T> integral)
+        {
+            Function = function;
+            Derivative = derivative;
+            Integral = integral;
+        }
+    }
+}
diff --git a/Esiur.Analysis/Neural/ActivationFunction.cs b/Esiur.Analysis/Neural/ActivationFunction.cs
new file mode 100644
index 0000000..b76c4c8
--- /dev/null
+++ b/Esiur.Analysis/Neural/ActivationFunction.cs
@@ -0,0 +1,87 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Esiur.Analysis.Neural
+{
+    public delegate double ActivationFunction(double value);
+
+    public static class ActivationFunctions
+    {
+        // Returns the sigmoid paired with its derivative; the derivative is
+        // expressed in terms of the sigmoid's output.
+        public static (ActivationFunction, ActivationFunction) Sigmoid()
+        {
+            return (new ActivationFunction(x =>
+            {
+                double k = Math.Exp(x);
+                return k / (1.0 + k);
+            }),
+            new ActivationFunction(x => x * (1 - x)));
+        }
+
+        public static ActivationFunction Tanh()
+        {
+            return new ActivationFunction(x => Math.Tanh(x));
+        }
+
+        public static ActivationFunction ReLU()
+        {
+            return new ActivationFunction(x => (x <= 0) ? 0 : x);
+        }
+
+        public static ActivationFunction LeakyReLU()
+        {
+            return new ActivationFunction(x => (x <= 0) ? 0.01 * x : x);
+        }
+
+        public static ActivationFunction SigmoidDer()
+        {
+            return new ActivationFunction(x => x * (1 - x));
+        }
+
+        public static ActivationFunction TanhDer()
+        {
+            return new ActivationFunction(x => 1 - (x * x));
+        }
+
+        public static ActivationFunction ReLUDer()
+        {
+            return new ActivationFunction(x => (x <= 0) ? 0 : 1);
+        }
+
+        public static ActivationFunction LeakyReLUDer()
+        {
+            return new ActivationFunction(x => (x <= 0) ? 0.01 : 1);
+        }
+    }
+}
diff --git a/Esiur.Analysis/Neural/NeuralLayer.cs b/Esiur.Analysis/Neural/NeuralLayer.cs
new file mode 100644
index 0000000..5fe8cdc
--- /dev/null
+++ b/Esiur.Analysis/Neural/NeuralLayer.cs
@@ -0,0 +1,50 @@
+using Esiur.Analysis.Algebra;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Esiur.Analysis.Neural
+{
+    internal class NeuralLayer
+    {
+        public Neuron[] Neurons { get; internal set; }
+
+        public MathFunction<RealFunction> Activation { get; internal set; }
+
+        public NeuralLayer? PreviousLayer { get; internal set; }
+        public double Bias { get; set; }
+
+        public NeuralLayer(int nodes, MathFunction<RealFunction> activation, NeuralLayer? previousLayer)
+        {
+            PreviousLayer = previousLayer;
+            Neurons = new Neuron[nodes];
+            Activation = activation;
+
+            for (var i = 0; i < nodes; i++)
+            {
+                var neuron = new Neuron()
+                {
+                    Layer = this,
+                };
+
+                // Fully connect the new neuron to every neuron of the previous layer.
+                if (previousLayer != null)
+                {
+                    for (var j = 0; j < previousLayer.Neurons.Length; j++)
+                    {
+                        neuron.Synapses.Add(new Synapse() { Source = previousLayer.Neurons[j], Target = neuron, Weight = 0 });
+                    }
+                }
+
+                Neurons[i] = neuron;
+            }
+        }
+    }
+}
diff --git a/Esiur.Analysis/Neural/NeuralNetwork.cs b/Esiur.Analysis/Neural/NeuralNetwork.cs
new file mode 100644
index 0000000..9ce1c0e
--- /dev/null
+++ b/Esiur.Analysis/Neural/NeuralNetwork.cs
@@ -0,0 +1,101 @@
+using Esiur.Analysis.Algebra;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Esiur.Analysis.Neural
+{
+    public class NeuralNetwork
+    {
+        NeuralLayer[] neuralLayers;
+
+        public NeuralNetwork(int[] layers, MathFunction<RealFunction>[] activations)
+        {
+            neuralLayers = new NeuralLayer[layers.Length];
+
+            for (var i = 0; i < layers.Length; i++)
+                neuralLayers[i] = new NeuralLayer(layers[i], activations[i], i == 0 ? null : neuralLayers[i - 1]);
+        }
+
+        public double[] FeedForward(double[] input)
+        {
+            // Load the input vector into the first layer.
+            for (int i = 0; i < input.Length; i++)
+                neuralLayers[0].Neurons[i].Value = input[i];
+
+            // Propagate layer by layer.
+            for (int i = 1; i < neuralLayers.Length; i++)
+                for (int j = 0; j < neuralLayers[i].Neurons.Length; j++)
+                    neuralLayers[i].Neurons[j].Forward();
+
+            return neuralLayers.Last().Neurons.Select(x => x.Value).ToArray();
+        }
+
+        public void BackPropagate(double[] input, double[] target, double learningRate)
+        {
+            var output = FeedForward(input);
+
+            // Total error (squared error function). Not used in the updates below;
+            // it only indicates how well the network is performing.
+            double totalError = 0.5 * output.Zip(target, (x, y) => Math.Pow(x - y, 2)).Sum();
+
+            // Chain rule: dE/dW = dE/dOut * dOut/dNet * dNet/dW.
+            // gamma[l][j] holds dE/dNet for neuron j of layer l.
+            var gamma = neuralLayers.Select(l => new double[l.Neurons.Length]).ToArray();
+
+            int last = neuralLayers.Length - 1;
+
+            // Output layer: gamma = (output - target) * f'(output). The stored
+            // derivatives take the neuron's output as their argument (see Functions.cs),
+            // and every activation used here must carry a Derivative.
+            for (int j = 0; j < output.Length; j++)
+                gamma[last][j] = (output[j] - target[j])
+                    * neuralLayers[last].Activation.Derivative.Function(output[j]);
+
+            // Hidden layers: gamma_j = f'(value_j) * sum_k(gamma_k * w_jk),
+            // where k runs over the neurons of the next layer. Synapses[j] of a
+            // neuron in layer l+1 is the connection from neuron j of layer l.
+            for (int l = last - 1; l > 0; l--)
+            {
+                for (int j = 0; j < neuralLayers[l].Neurons.Length; j++)
+                {
+                    double sum = 0;
+                    for (int k = 0; k < neuralLayers[l + 1].Neurons.Length; k++)
+                        sum += gamma[l + 1][k] * neuralLayers[l + 1].Neurons[k].Synapses[j].Weight;
+
+                    gamma[l][j] = sum
+                        * neuralLayers[l].Activation.Derivative.Function(neuralLayers[l].Neurons[j].Value);
+                }
+            }
+
+            // Gradient descent step over weights and biases.
+            for (int l = 1; l <= last; l++)
+            {
+                for (int j = 0; j < neuralLayers[l].Neurons.Length; j++)
+                    foreach (var synapse in neuralLayers[l].Neurons[j].Synapses)
+                        synapse.Weight -= learningRate * gamma[l][j] * synapse.Source.Value;
+
+                // A layer's neurons read their bias from the previous layer
+                // (see Neuron.Forward), so that is where the update lands.
+                neuralLayers[l - 1].Bias -= learningRate * gamma[l].Sum();
+            }
+        }
+    }
+}
diff --git a/Esiur.Analysis/Neural/Neuron.cs b/Esiur.Analysis/Neural/Neuron.cs
new file mode 100644
index 0000000..5b37cc4
--- /dev/null
+++ b/Esiur.Analysis/Neural/Neuron.cs
@@ -0,0 +1,21 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Esiur.Analysis.Neural
+{
+    internal class Neuron
+    {
+        public double Value { get; set; }
+        public NeuralLayer Layer { get; set; }
+
+        public List<Synapse> Synapses { get; set; } = new List<Synapse>();
+
+        public void Forward()
+        {
+            // Weighted sum of the incoming synapses plus the bias stored on the
+            // previous layer (0 for the input layer), passed through the layer's
+            // activation function.
+            var sum = Synapses.Sum(x => x.Weight * x.Source.Value);
+            Value = Layer.Activation.Function(sum + (Layer.PreviousLayer?.Bias ?? 0));
+        }
+    }
+}
diff --git a/Esiur.Analysis/Neural/Synapse.cs b/Esiur.Analysis/Neural/Synapse.cs
new file mode 100644
index 0000000..a874917
--- /dev/null
+++ b/Esiur.Analysis/Neural/Synapse.cs
@@ -0,0 +1,14 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Esiur.Analysis.Neural
+{
+    internal class Synapse
+    {
+        public double Weight { get; set; }
+
+        public Neuron Source { get; set; }
+        public Neuron Target { get; set; }
+    }
+}
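
For reference, a minimal usage sketch of the API this diff introduces. The layer sizes, training pair, and learning rate are illustrative only, and since the diff initializes all synapse weights to 0, a real caller would want to randomize weights before training:

    using Esiur.Analysis.Algebra;
    using Esiur.Analysis.Neural;

    // Hypothetical example: a 2-4-1 network using the sigmoid activation
    // (and its bundled derivative) on every layer.
    var network = new NeuralNetwork(
        new[] { 2, 4, 1 },
        new[] { Functions.Sigmoid, Functions.Sigmoid, Functions.Sigmoid });

    // One gradient-descent step toward target output 1.0 for input (0, 1),
    // then a forward pass to inspect the prediction.
    network.BackPropagate(new double[] { 0, 1 }, new double[] { 1.0 }, learningRate: 0.5);
    var prediction = network.FeedForward(new double[] { 0, 1 });

Note that BackPropagate runs its own forward pass, so calling FeedForward beforehand is unnecessary; the call above is only to read the updated output.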