This commit is contained in:
Ahmed Zamil 2023-01-28 15:33:49 +03:00
parent 01d53ea67c
commit 9e620a98ca
8 changed files with 347 additions and 0 deletions

View File

@@ -39,6 +39,9 @@ namespace Esiur.Analysis.Test
[STAThread]
static void Main()
{
    var f = Esiur.Analysis.Algebra.Functions.Sigmoid;
    var signalA = new double[] { V, 1, V, 1, V, V, V };
    var signalB = new double[] { V, V, 1, V, V, 1, V };
    var cor = signalA.CrossCorrelation(signalB, true);

View File

@@ -0,0 +1,42 @@
using Esiur.Analysis.Neural;
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Analysis.Algebra
{
    public static class Functions
    {
        // Note: the derivatives below follow the usual back-propagation convention and are
        // expressed in terms of the activation's *output* (y = f(x)), not its input.

        private static MathFunction<RealFunction> SigmoidDerivative
            = new MathFunction<RealFunction>(new RealFunction(y => y * (1 - y)), null, null);

        public static MathFunction<RealFunction> Sigmoid = new MathFunction<RealFunction>(new RealFunction(x =>
        {
            // 1 / (1 + e^-x), written to avoid overflow of Math.Exp for large positive x.
            if (x >= 0)
                return 1.0 / (1.0 + Math.Exp(-x));
            double k = Math.Exp(x);
            return k / (1.0 + k);
        }), SigmoidDerivative, null);

        private static MathFunction<RealFunction> TanhDerivative = new MathFunction<RealFunction>(
            new RealFunction(y => 1 - (y * y)), null, null);

        public static MathFunction<RealFunction> Tanh = new MathFunction<RealFunction>(
            new RealFunction(x => Math.Tanh(x)), TanhDerivative, null);

        private static MathFunction<RealFunction> ReLUDerivative = new MathFunction<RealFunction>(
            new RealFunction(x => (x <= 0) ? 0 : 1), null, null);

        public static MathFunction<RealFunction> ReLU = new MathFunction<RealFunction>(
            new RealFunction(x => (x <= 0) ? 0 : x), ReLUDerivative, null);

        private static MathFunction<RealFunction> LeakyReLUDerivative = new MathFunction<RealFunction>(
            new RealFunction(x => (x <= 0) ? 0.01 : 1), null, null);

        public static MathFunction<RealFunction> LeakyReLU = new MathFunction<RealFunction>(
            new RealFunction(x => (x <= 0) ? 0.01 * x : x), LeakyReLUDerivative, null);
    }
}
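For reference, a minimal usage sketch (not part of this commit) showing how these wrappers evaluate; it relies only on the Functions and MathFunction<RealFunction> types in this diff:

using Esiur.Analysis.Algebra;

class SigmoidDemo
{
    static void Main()
    {
        var sigmoid = Functions.Sigmoid;

        double y = sigmoid.Function(0.5);            // f(0.5) ≈ 0.622
        double dy = sigmoid.Derivative.Function(y);  // derivative via the output: y * (1 - y) ≈ 0.235

        System.Console.WriteLine($"{y} {dy}");
    }
}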

View File

@@ -0,0 +1,29 @@
using Esiur.Analysis.Neural;
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Analysis.Algebra
{
    public delegate double RealFunction(double x);
    public delegate double Real2Function(double x, double y);
    public delegate double Real3Function(double x, double y, double z);

    // Wraps a function together with (optionally) its derivative and integral,
    // so callers can evaluate all of them through one object.
    public class MathFunction<T>
    {
        public T Function { get; internal set; }
        public MathFunction<T> Derivative { get; internal set; }
        public MathFunction<T> Integral { get; internal set; }

        public MathFunction(T function, MathFunction<T> derivative, MathFunction<T> integral)
        {
            Function = function;
            Derivative = derivative;
            Integral = integral;
        }
    }
}
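A short sketch (not part of the commit) of wrapping a custom function and its derivative with this class, using only the delegates declared above; the function chosen here is arbitrary:

using Esiur.Analysis.Algebra;

class SquareDemo
{
    static void Main()
    {
        // f(x) = x^2 together with f'(x) = 2x; the integral slot is left null.
        var squareDerivative = new MathFunction<RealFunction>(new RealFunction(x => 2 * x), null, null);
        var square = new MathFunction<RealFunction>(new RealFunction(x => x * x), squareDerivative, null);

        System.Console.WriteLine(square.Function(3));            // 9
        System.Console.WriteLine(square.Derivative.Function(3)); // 6
    }
}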

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Analysis.Neural
{
    public delegate double ActivationFunction(double value);

    public static class ActivationFunctions
    {
        // Returns the sigmoid together with its derivative. As elsewhere in this
        // library, the derivative is expressed in terms of the sigmoid's output y,
        // i.e. f'(x) = y * (1 - y).
        public static (ActivationFunction, ActivationFunction) Sigmoid()
        {
            return (new ActivationFunction(x =>
            {
                double k = Math.Exp(x);
                return k / (1.0 + k);
            }),
            new ActivationFunction(y =>
            {
                return y * (1 - y);
            }));
        }

        public static ActivationFunction Tanh()
        {
            return new ActivationFunction(x =>
            {
                return Math.Tanh(x);
            });
        }

        public static ActivationFunction ReLU()
        {
            return new ActivationFunction(x =>
            {
                return (x <= 0) ? 0 : x;
            });
        }

        public static ActivationFunction LeakyReLU()
        {
            return new ActivationFunction(x =>
            {
                return (x <= 0) ? 0.01 * x : x;
            });
        }

        // The *Der methods below are the derivatives of the activations above,
        // again written in terms of the activation's output where applicable.
        public static ActivationFunction SigmoidDer()
        {
            return new ActivationFunction(y =>
            {
                return y * (1 - y);
            });
        }

        public static ActivationFunction TanhDer()
        {
            return new ActivationFunction(y =>
            {
                return 1 - (y * y);
            });
        }

        public static ActivationFunction ReLUDer()
        {
            return new ActivationFunction(x =>
            {
                return (x <= 0) ? 0 : 1;
            });
        }

        public static ActivationFunction LeakyReLUDer()
        {
            return new ActivationFunction(x =>
            {
                return (x <= 0) ? 0.01 : 1;
            });
        }
    }
}
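A small sketch (not part of the commit) of consuming this delegate-pair API; the tuple returned by Sigmoid() deconstructs into the activation and its derivative:

using Esiur.Analysis.Neural;

class ActivationDemo
{
    static void Main()
    {
        var (sigmoid, sigmoidDer) = ActivationFunctions.Sigmoid();

        double y = sigmoid(0.5);   // ≈ 0.622
        double dy = sigmoidDer(y); // derivative evaluated at the output: ≈ 0.235

        ActivationFunction relu = ActivationFunctions.ReLU();
        System.Console.WriteLine($"{y} {dy} {relu(-2)} {relu(2)}");
    }
}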

View File

@@ -0,0 +1,50 @@
using Esiur.Analysis.Algebra;
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Analysis.Neural
{
    internal class NeuralLayer
    {
        public Neuron[] Neurons { get; internal set; }
        public MathFunction<RealFunction> Activation { get; internal set; }
        public NeuralLayer? PreviousLayer { get; internal set; }
        public double Bias { get; set; }

        public NeuralLayer(int nodes, MathFunction<RealFunction> activation, NeuralLayer? previousLayer)
        {
            PreviousLayer = previousLayer;
            Neurons = new Neuron[nodes];
            Activation = activation;

            var rand = new Random();

            for (var i = 0; i < nodes; i++)
            {
                var neuron = new Neuron()
                {
                    Layer = this,
                };

                // Fully connect this neuron to every neuron of the previous layer.
                // The incoming synapses are stored on the neuron itself so that
                // Neuron.Forward() can sum over them; weights start as small random
                // values rather than zero so that training can break symmetry.
                if (previousLayer != null)
                {
                    for (var j = 0; j < previousLayer.Neurons.Length; j++)
                    {
                        neuron.Synapses.Add(new Synapse()
                        {
                            Source = previousLayer.Neurons[j],
                            Target = neuron,
                            Weight = rand.NextDouble() - 0.5
                        });
                    }
                }

                Neurons[i] = neuron;
            }
        }
    }
}

View File

@@ -0,0 +1,101 @@
using Esiur.Analysis.Algebra;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace Esiur.Analysis.Neural
{
    public class NeuralNetwork
    {
        NeuralLayer[] neuralLayers;

        public NeuralNetwork(int[] layers, MathFunction<RealFunction>[] activations)
        {
            neuralLayers = new NeuralLayer[layers.Length];
            for (var i = 0; i < layers.Length; i++)
                neuralLayers[i] = new NeuralLayer(layers[i], activations[i], i == 0 ? null : neuralLayers[i - 1]);
        }

        public double[] FeedForward(double[] input)
        {
            // Load the input vector into the first layer, then propagate forward.
            for (int i = 0; i < input.Length; i++)
                neuralLayers[0].Neurons[i].Value = input[i];

            for (int i = 1; i < neuralLayers.Length; i++)
                for (int j = 0; j < neuralLayers[i].Neurons.Length; j++)
                    neuralLayers[i].Neurons[j].Forward();

            return neuralLayers.Last().Neurons.Select(x => x.Value).ToArray();
        }

        // One step of gradient descent using the chain rule
        // dE/dW = dE/dOut * dOut/dNet * dNet/dW.
        // Returns the total squared error before the update; this value is only
        // a diagnostic of the network's performance.
        public double BackPropagate(double[] input, double[] target, double learningRate = 0.05)
        {
            var output = FeedForward(input);

            // Total error (squared error function).
            double totalError = 0.5 * output.Zip(target, (x, y) => Math.Pow(x - y, 2)).Sum();

            // gamma[n] = dE/dNet for neuron n. The activation derivatives in this
            // library are expressed in terms of the neuron's output value.
            var gamma = new Dictionary<Neuron, double>();

            // Output layer: dE/dOut = (out - target), dOut/dNet = f'(out).
            var outputLayer = neuralLayers.Last();
            for (var i = 0; i < outputLayer.Neurons.Length; i++)
            {
                var neuron = outputLayer.Neurons[i];
                gamma[neuron] = (output[i] - target[i]) * outputLayer.Activation.Derivative.Function(neuron.Value);
            }

            // Hidden layers: propagate gamma backwards through the outgoing synapses.
            for (var i = neuralLayers.Length - 2; i > 0; i--)
            {
                var layer = neuralLayers[i];
                var nextLayer = neuralLayers[i + 1];

                foreach (var neuron in layer.Neurons)
                {
                    double sum = 0;
                    foreach (var nextNeuron in nextLayer.Neurons)
                        foreach (var synapse in nextNeuron.Synapses.Where(s => s.Source == neuron))
                            sum += gamma[nextNeuron] * synapse.Weight;

                    gamma[neuron] = sum * layer.Activation.Derivative.Function(neuron.Value);
                }
            }

            // Update weights and biases: dNet/dW is the source neuron's output,
            // and the shared layer bias accumulates the gammas of the layer it feeds.
            for (var i = 1; i < neuralLayers.Length; i++)
            {
                var layer = neuralLayers[i];
                foreach (var neuron in layer.Neurons)
                {
                    layer.PreviousLayer.Bias -= gamma[neuron] * learningRate;
                    foreach (var synapse in neuron.Synapses)
                        synapse.Weight -= gamma[neuron] * synapse.Source.Value * learningRate;
                }
            }

            return totalError;
        }
    }
}
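A minimal training sketch (not part of the commit), assuming BackPropagate takes the input, the target vector, and a learning rate as above; the topology, learning rate, and iteration count are arbitrary illustration values:

using Esiur.Analysis.Algebra;
using Esiur.Analysis.Neural;

class XorDemo
{
    static void Main()
    {
        // 2 inputs, one hidden layer of 3 neurons, 1 output, all sigmoid.
        var network = new NeuralNetwork(
            new[] { 2, 3, 1 },
            new[] { Functions.Sigmoid, Functions.Sigmoid, Functions.Sigmoid });

        var samples = new (double[] input, double[] target)[]
        {
            (new double[] { 0, 0 }, new double[] { 0 }),
            (new double[] { 0, 1 }, new double[] { 1 }),
            (new double[] { 1, 0 }, new double[] { 1 }),
            (new double[] { 1, 1 }, new double[] { 0 }),
        };

        for (var epoch = 0; epoch < 10000; epoch++)
            foreach (var (input, target) in samples)
                network.BackPropagate(input, target, 0.5);

        foreach (var (input, _) in samples)
            System.Console.WriteLine($"{input[0]} XOR {input[1]} -> {network.FeedForward(input)[0]:F3}");
    }
}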

View File

@@ -0,0 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace Esiur.Analysis.Neural
{
    internal class Neuron
    {
        public double Value { get; set; }
        public NeuralLayer Layer { get; set; }

        // Incoming synapses from the previous layer.
        public List<Synapse> Synapses { get; set; } = new List<Synapse>();

        public void Forward()
        {
            // Weighted sum of the incoming values plus the layer bias, passed through
            // the layer's activation. The parentheses matter: without them,
            // `sum + bias ?? 0` would discard `sum` whenever there is no previous
            // layer, because `+` binds tighter than `??`.
            var sum = Synapses.Sum(x => x.Weight * x.Source.Value);
            Value = Layer.Activation.Function(sum + (Layer.PreviousLayer?.Bias ?? 0));
        }
    }
}

View File

@@ -0,0 +1,14 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Analysis.Neural
{
    internal class Synapse
    {
        public double Weight { get; set; }
        public Neuron Source { get; set; }
        public Neuron Target { get; set; }
    }
}