//--- (removed stray page-viewer artifacts: "460 Zeilen / 15 KiB / MQL5")
//+------------------------------------------------------------------+
//|                                                    NeuralNet.mqh |
//|                                    Copyright 2025, Google Gemini |
//|                                            https://www.mql5.com  |
//+------------------------------------------------------------------+
#property copyright "Copyright 2025, Google Gemini"
#property link      "https://www.google.com"
#property strict

#include <Arrays\ArrayObj.mqh>
//+------------------------------------------------------------------+
//| Neuron Connection Structure                                      |
//+------------------------------------------------------------------+
//--- One outgoing synapse: the current weight plus the previous
//--- weight change (kept so the momentum term can reuse it).
struct SConnection
  {
   double            weight;        // synapse weight
   double            deltaWeight;   // last applied weight update (momentum)
  };
//+------------------------------------------------------------------+
//| Neuron Class                                                     |
//+------------------------------------------------------------------+
//--- A single network unit. Each neuron stores its activation, its
//--- error gradient, and the weights of its OUTGOING connections
//--- (indexed by the target neuron's position in the next layer).
class CNeuron : public CObject
  {
public:
   double            m_outputVal;        // last activation value
   double            m_gradient;         // error gradient from backprop
   SConnection       m_outputWeights[];  // connections to the next layer
   int               m_myIndex;          // this neuron's index in its layer

                     CNeuron(int numOutputs, int myIndex);
                    ~CNeuron() {}

   //--- activation accessors
   void              SetOutputVal(double val) { m_outputVal = val; }
   double            GetOutputVal() const     { return m_outputVal; }

   //--- training steps
   void              FeedForward(CArrayObj *prevLayer);
   void              CalcOutputGradients(double targetVal);
   void              CalcHiddenGradients(CArrayObj *nextLayer);
   void              UpdateInputWeights(CArrayObj *prevLayer);

   //--- serialization / perturbation
   void              SaveWeights(int fileHandle);
   void              LoadWeights(int fileHandle, int numOutputs);
   void              Jitter(double magnitude);

private:
   static double     TransferFunction(double x);
   static double     TransferFunctionDerivative(double x);

public:
   //--- hyperparameters (per-neuron, managed by CNeuralNet)
   double            m_eta;     // learning rate
   double            m_alpha;   // momentum
   double            m_lambda;  // L2 weight decay
  };
//--- Constructor: allocate the outgoing connections and seed them with
//--- small random weights.
CNeuron::CNeuron(int numOutputs, int myIndex)
  {
   m_myIndex = myIndex;
   m_eta     = 0.15;
   m_alpha   = 0.5;
   m_lambda  = 0.001;

   ArrayResize(m_outputWeights, numOutputs);
   for(int c = 0; c < numOutputs; ++c)
     {
      // MathRand() yields 0..32767, so this maps into [-0.5 .. 0.5]
      m_outputWeights[c].weight      = (MathRand() / 32767.0) - 0.5;
      m_outputWeights[c].deltaWeight = 0;
     }
  }
//--- Activation function: hyperbolic tangent, output range [-1.0 .. 1.0]
double CNeuron::TransferFunction(double x)
  {
   return tanh(x);
  }
//--- Derivative of the activation for backprop. The argument is the
//--- neuron's OUTPUT (i.e. tanh of the summed input), so 1 - x*x is
//--- the exact derivative d/ds tanh(s), not an approximation.
double CNeuron::TransferFunctionDerivative(double x)
  {
   return 1.0 - x * x;
  }
//--- Feed Forward: compute this neuron's activation from the outputs
//--- of the previous layer. The bias neuron there needs no special
//--- handling: it is just the last element of prevLayer.
void CNeuron::FeedForward(CArrayObj *prevLayer)
  {
   double sum   = 0.0;
   int    total = prevLayer.Total();

   for(int n = 0; n < total; ++n)
     {
      CNeuron *src = (CNeuron*)prevLayer.At(n);
      // the source neuron holds the weight of its connection TO us
      sum += src.GetOutputVal() * src.m_outputWeights[m_myIndex].weight;
     }

   m_outputVal = TransferFunction(sum);
  }
//--- Gradient for an output-layer neuron: the raw error scaled by the
//--- activation derivative at this neuron's output.
void CNeuron::CalcOutputGradients(double targetVal)
  {
   double err = targetVal - m_outputVal;
   m_gradient = err * TransferFunctionDerivative(m_outputVal);
  }
//--- Gradient for a hidden-layer neuron: sum of the next layer's
//--- gradients weighted by our outgoing connections (the next layer's
//--- bias neuron is excluded), scaled by the activation derivative.
void CNeuron::CalcHiddenGradients(CArrayObj *nextLayer)
  {
   double sumDow = 0.0;
   int    count  = nextLayer.Total() - 1;   // skip the bias neuron

   for(int n = 0; n < count; ++n)
     {
      CNeuron *next = (CNeuron*)nextLayer.At(n);
      sumDow += m_outputWeights[n].weight * next.m_gradient;
     }

   m_gradient = sumDow * TransferFunctionDerivative(m_outputVal);
  }
//--- Update the weights feeding THIS neuron. Those weights are stored
//--- on the PREVIOUS layer's neurons (each neuron owns its outgoing
//--- connections). Update = gradient-descent step + momentum; the
//--- weight itself is first decayed by (1 - lambda) as L2
//--- regularization before the new delta is added.
void CNeuron::UpdateInputWeights(CArrayObj *prevLayer)
  {
   int total = prevLayer.Total();

   for(int n = 0; n < total; ++n)
     {
      CNeuron *src       = (CNeuron*)prevLayer.At(n);
      double   prevDelta = src.m_outputWeights[m_myIndex].deltaWeight;

      // learning-rate term plus a fraction of the previous delta
      double newDelta = m_eta * src.GetOutputVal() * m_gradient
                        + m_alpha * prevDelta;

      src.m_outputWeights[m_myIndex].deltaWeight = newDelta;
      src.m_outputWeights[m_myIndex].weight =
         (src.m_outputWeights[m_myIndex].weight * (1.0 - m_lambda)) + newDelta;
     }
  }
//--- Serialize this neuron's outgoing weights to an open binary file.
//--- Delta weights are transient training state and are not persisted.
void CNeuron::SaveWeights(int fileHandle)
  {
   int count = ArraySize(m_outputWeights);
   for(int c = 0; c < count; ++c)
      FileWriteDouble(fileHandle, m_outputWeights[c].weight);
  }
//--- Restore outgoing weights from an open binary file. Momentum
//--- (deltaWeight) is reset, matching SaveWeights which does not
//--- persist it.
void CNeuron::LoadWeights(int fileHandle, int numOutputs)
  {
   ArrayResize(m_outputWeights, numOutputs);
   for(int c = 0; c < numOutputs; ++c)
     {
      m_outputWeights[c].weight      = FileReadDouble(fileHandle);
      m_outputWeights[c].deltaWeight = 0;
     }
  }
//--- Perturb every outgoing weight with uniform noise in
//--- [-magnitude/2 .. +magnitude/2] (used to shake the net out of
//--- local minima).
void CNeuron::Jitter(double magnitude)
  {
   int count = ArraySize(m_outputWeights);
   for(int c = 0; c < count; ++c)
      m_outputWeights[c].weight += (MathRand() / 32767.0 - 0.5) * magnitude;
  }
//+------------------------------------------------------------------+
//| Neural Network Class                                             |
//+------------------------------------------------------------------+
//--- Fully-connected feed-forward network trained with
//--- backpropagation. m_layers[layerNum] holds one layer's neurons;
//--- every layer carries an extra bias neuron as its last element.
class CNeuralNet
  {
private:
   CArrayObj        *m_layers[];                      // m_layers[layer] -> neurons
   double            m_error;                         // RMS error of last BackProp
   double            m_recentAverageError;            // smoothed running error
   double            m_recentAverageSmoothingFactor;  // smoothing window size

public:
                     CNeuralNet(const int &topology[]);
                    ~CNeuralNet();

   void              FeedForward(const double &inputVals[]);
   void              BackProp(const double &targetVals[]);
   void              GetResults(double &resultVals[]) const;
   double            GetRecentAverageError(void) const { return m_recentAverageError; }

   //--- persistence
   bool              Save(string fileName, int flags=0);
   bool              Save(int handle);
   bool              Load(string fileName, const int &topology[], int flags=0);
   bool              Load(int handle, const int &topology[]);

   //--- hyperparameters (mirrored into every neuron by the setters)
   double            eta;     // Learning Rate
   double            alpha;   // Momentum
   double            lambda;  // L2 Regularization

   void              SetLearningRate(double rate);
   void              SetAlpha(double a);
   void              SetLambda(double l);
   void              Jitter(double magnitude);
  };
//--- Constructor: build the layers described by `topology` (neuron
//--- count per layer, excluding bias) and push the default
//--- hyperparameters down to every neuron.
CNeuralNet::CNeuralNet(const int &topology[])
  {
   int numLayers = ArraySize(topology);
   ArrayResize(m_layers, numLayers);

   // Fix: start the error accumulators from a defined state instead of
   // relying on implicit zero-initialization; BackProp reads
   // m_recentAverageError before ever writing it.
   m_error              = 0.0;
   m_recentAverageError = 0.0;

   m_recentAverageSmoothingFactor = 100.0; // simple moving average over approx 100 samples
   eta    = 0.15;
   alpha  = 0.5;
   lambda = 0.001;

   for(int layerNum = 0; layerNum < numLayers; ++layerNum)
     {
      // Create new layer
      m_layers[layerNum] = new CArrayObj();

      // Neurons in this layer fan out to every neuron of the NEXT
      // layer; the output layer has no outgoing connections.
      int numOutputs = (layerNum == numLayers - 1) ? 0 : topology[layerNum + 1];

      // topology[layerNum] real neurons plus one bias neuron (hence <=)
      for(int neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum)
        {
         CNeuron *neuron = new CNeuron(numOutputs, neuronNum);
         m_layers[layerNum].Add(neuron);

         // the bias neuron is the last one; its output is pinned at 1.0
         if(neuronNum == topology[layerNum])
            neuron.SetOutputVal(1.0);
        }
     }

   // Propagate initial hyperparameters to all neurons
   SetLearningRate(eta);
   SetAlpha(alpha);
   SetLambda(lambda);
  }
//--- Destructor: free every layer container.
//--- NOTE(review): the contained neurons are presumably freed by
//--- CArrayObj when its free mode is enabled (the default) — confirm
//--- against the standard library if ownership changes.
CNeuralNet::~CNeuralNet()
  {
   int numLayers = ArraySize(m_layers);
   for(int i = 0; i < numLayers; ++i)
      if(CheckPointer(m_layers[i]) != POINTER_INVALID)
         delete m_layers[i];
  }
//--- Feed Forward: latch the input vector into the input layer, then
//--- propagate activations layer by layer toward the outputs.
//--- Logs an error and returns if the input size does not match the
//--- input layer (bias excluded).
void CNeuralNet::FeedForward(const double &inputVals[])
  {
   // Check inputs match input layer size (minus bias)
   CArrayObj *inputLayer = m_layers[0];
   if(ArraySize(inputVals) != inputLayer.Total() - 1)
     {
      // Fix: error message previously read "Input Input size mismatch"
      Print("Error: NeuralNet input size mismatch.");
      return;
     }

   // Assign (latch) the input values into the input neurons
   for(int i = 0; i < ArraySize(inputVals); ++i)
     {
      CNeuron *n = (CNeuron*)inputLayer.At(i);
      n.SetOutputVal(inputVals[i]);
     }

   // Forward propagate through hidden and output layers
   for(int layerNum = 1; layerNum < ArraySize(m_layers); ++layerNum)
     {
      CArrayObj *prevLayer = m_layers[layerNum - 1];
      CArrayObj *currLayer = m_layers[layerNum];

      // each neuron in the current layer, excluding its bias neuron
      for(int n = 0; n < currLayer.Total() - 1; ++n)
        {
         CNeuron *neuron = (CNeuron*)currLayer.At(n);
         neuron.FeedForward(prevLayer);
        }
     }
  }
//--- Back Propagation: compute the network RMS error, derive gradients
//--- from the output layer back through the hidden layers, then update
//--- every connection weight.
void CNeuralNet::BackProp(const double &targetVals[])
  {
   CArrayObj *outputLayer = m_layers[ArraySize(m_layers) - 1];
   int numOutputs = outputLayer.Total() - 1;   // exclude bias

   // overall net error: RMS of the output neuron errors
   m_error = 0.0;
   for(int n = 0; n < numOutputs; ++n)
     {
      CNeuron *outNeuron = (CNeuron*)outputLayer.At(n);
      double delta = targetVals[n] - outNeuron.GetOutputVal();
      m_error += delta * delta;
     }
   m_error = sqrt(m_error / numOutputs);   // mean squared error -> RMS

   // smoothed running average of the error
   m_recentAverageError =
      (m_recentAverageError * m_recentAverageSmoothingFactor + m_error)
      / (m_recentAverageSmoothingFactor + 1.0);

   // output layer gradients
   for(int n = 0; n < numOutputs; ++n)
     {
      CNeuron *outNeuron = (CNeuron*)outputLayer.At(n);
      outNeuron.CalcOutputGradients(targetVals[n]);
     }

   // hidden layer gradients, from the last hidden layer backwards
   for(int layerNum = ArraySize(m_layers) - 2; layerNum > 0; --layerNum)
     {
      CArrayObj *hiddenLayer = m_layers[layerNum];
      CArrayObj *nextLayer   = m_layers[layerNum + 1];

      for(int n = 0; n < hiddenLayer.Total(); ++n)
        {
         CNeuron *hidden = (CNeuron*)hiddenLayer.At(n);
         hidden.CalcHiddenGradients(nextLayer);
        }
     }

   // update connection weights for all layers, outputs first
   // (bias neurons are skipped: their inputs are not trained here)
   for(int layerNum = ArraySize(m_layers) - 1; layerNum > 0; --layerNum)
     {
      CArrayObj *layer     = m_layers[layerNum];
      CArrayObj *prevLayer = m_layers[layerNum - 1];

      for(int n = 0; n < layer.Total() - 1; ++n)
        {
         CNeuron *neuron = (CNeuron*)layer.At(n);
         neuron.UpdateInputWeights(prevLayer);
        }
     }
  }
//--- Copy the output-layer activations (bias excluded) into resultVals,
//--- resizing the array to fit.
void CNeuralNet::GetResults(double &resultVals[]) const
  {
   CArrayObj *outputLayer = m_layers[ArraySize(m_layers) - 1];
   int numOutputs = outputLayer.Total() - 1;

   ArrayResize(resultVals, numOutputs);
   for(int n = 0; n < numOutputs; ++n)
     {
      CNeuron *neuron = (CNeuron*)outputLayer.At(n);
      resultVals[n] = neuron.GetOutputVal();
     }
  }
//--- Save the network weights to a binary file by name.
//--- Returns false when the file cannot be opened or writing fails.
bool CNeuralNet::Save(string fileName, int flags=0)
  {
   int handle = FileOpen(fileName, FILE_WRITE|FILE_BIN|flags);
   if(handle == INVALID_HANDLE)
     {
      Print("Failed to save NeuralNet to ", fileName);
      return false;
     }

   bool ok = Save(handle);
   FileClose(handle);
   return ok;
  }
//--- Write all connection weights to an already-open binary file.
//--- The output layer is skipped: its neurons have no outgoing weights.
bool CNeuralNet::Save(int handle)
  {
   if(handle == INVALID_HANDLE)
      return false;

   int lastLayer = ArraySize(m_layers) - 1;
   for(int layerNum = 0; layerNum < lastLayer; ++layerNum)
     {
      CArrayObj *layer = m_layers[layerNum];
      for(int n = 0; n < layer.Total(); ++n)
        {
         CNeuron *neuron = (CNeuron*)layer.At(n);
         neuron.SaveWeights(handle);
        }
     }
   return true;
  }
//--- Load network weights from a binary file by name. The supplied
//--- topology must have the same layer count the network was built
//--- with; otherwise the call fails without touching the weights.
bool CNeuralNet::Load(string fileName, const int &topology[], int flags=0)
  {
   if(ArraySize(topology) != ArraySize(m_layers))
     {
      PrintFormat("Error: Topology size mismatch. Expected %d layers, but Topology has %d layers.", ArraySize(m_layers), ArraySize(topology));
      return false;
     }

   int handle = FileOpen(fileName, FILE_READ|FILE_BIN|flags);
   if(handle == INVALID_HANDLE)
     {
      PrintFormat("Error: Failed to open file '%s'. Error Code: %d", fileName, GetLastError());
      return false;
     }

   bool ok = Load(handle, topology);
   FileClose(handle);
   return ok;
  }
//--- Read all connection weights from an already-open binary file, in
//--- the exact order Save(handle) wrote them (output layer skipped).
bool CNeuralNet::Load(int handle, const int &topology[])
  {
   if(handle == INVALID_HANDLE)
      return false;

   int lastLayer = ArraySize(m_layers) - 1;
   for(int layerNum = 0; layerNum < lastLayer; ++layerNum)
     {
      CArrayObj *layer = m_layers[layerNum];
      // each neuron here fans out to topology[layerNum+1] targets
      for(int n = 0; n < layer.Total(); ++n)
        {
         CNeuron *neuron = (CNeuron*)layer.At(n);
         neuron.LoadWeights(handle, topology[layerNum+1]);
        }
     }
   return true;
  }
//--- Set the learning rate on the network and mirror it into every
//--- neuron (neurons keep a private copy used during weight updates).
void CNeuralNet::SetLearningRate(double rate)
  {
   eta = rate;
   int numLayers = ArraySize(m_layers);
   for(int i = 0; i < numLayers; i++)
     {
      int count = m_layers[i].Total();
      for(int j = 0; j < count; j++)
        {
         CNeuron *neuron = (CNeuron*)m_layers[i].At(j);
         neuron.m_eta = rate;
        }
     }
  }
//--- Set the momentum coefficient on the network and mirror it into
//--- every neuron.
void CNeuralNet::SetAlpha(double a)
  {
   alpha = a;
   int numLayers = ArraySize(m_layers);
   for(int i = 0; i < numLayers; i++)
     {
      int count = m_layers[i].Total();
      for(int j = 0; j < count; j++)
        {
         CNeuron *neuron = (CNeuron*)m_layers[i].At(j);
         neuron.m_alpha = a;
        }
     }
  }
//--- Set the L2 regularization coefficient on the network and mirror
//--- it into every neuron.
void CNeuralNet::SetLambda(double l)
  {
   lambda = l;
   int numLayers = ArraySize(m_layers);
   for(int i = 0; i < numLayers; i++)
     {
      int count = m_layers[i].Total();
      for(int j = 0; j < count; j++)
        {
         CNeuron *neuron = (CNeuron*)m_layers[i].At(j);
         neuron.m_lambda = l;
        }
     }
  }
//--- Add uniform noise scaled by `magnitude` to every weight in the
//--- network (output layer skipped: no outgoing weights there).
void CNeuralNet::Jitter(double magnitude)
  {
   int lastLayer = ArraySize(m_layers) - 1;
   for(int layerNum = 0; layerNum < lastLayer; ++layerNum)
     {
      CArrayObj *layer = m_layers[layerNum];
      for(int n = 0; n < layer.Total(); ++n)
        {
         CNeuron *neuron = (CNeuron*)layer.At(n);
         neuron.Jitter(magnitude);
        }
     }
  }
|