Original_NNB/MQL5/Scripts/NeuroNetworksBook/convolution/check_gradient_conv.mq5
super.admin e81e22b7b8 convert
2025-05-30 16:15:14 +02:00

356 lines
23 KiB
MQL5

//+------------------------------------------------------------------+
//| Check_Gradient_conv.mq5 |
//| Copyright 2021, MetaQuotes Ltd. |
//| https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link "https://www.mql5.com"
#property version "1.00"
#property script_show_inputs
//+------------------------------------------------------------------+
//| External (input) parameters of the script                        |
//+------------------------------------------------------------------+
// NOTE: the trailing comments on the 'input' lines below are shown by
// MQL5 as parameter captions in the script inputs dialog, so they are
// deliberately left unchanged.
// Name of the CSV file with the training sample
input string StudyFileName = "study_data.csv";// Имя файла с обучающей выборкой
// Number of historical bars packed into one pattern
input int BarsToLine = 40; // Количество исторических баров в одном паттерне
// Whether to run the network on OpenCL
input bool UseOpenCL = true; // Использовать OpenCL
//+------------------------------------------------------------------+
//| Include the neural network library                               |
//+------------------------------------------------------------------+
#include <NeuroNetworksBook\realization\neuronnet.mqh>
CNet *net;   // global pointer to the network under test; created and deleted in OnStart
//+------------------------------------------------------------------+
//| Script entry point.                                              |
//| Builds a four-layer net (input -> convolution -> pooling ->      |
//| output), reads one pattern from the study file and compares the  |
//| analytical gradients produced by backpropagation with numerical  |
//| estimates obtained by central differences — first for the input  |
//| data, then for the weights of the convolutional layer. The       |
//| accumulated deviation of each check is printed to the journal.   |
//+------------------------------------------------------------------+
void OnStart()
  {
//--- Container for the layer descriptions
   CArrayObj *layers = new CArrayObj();
   if(CheckPointer(layers) == POINTER_INVALID)
     {
      PrintFormat("Error of create CArrayObj: %d", GetLastError());
      return;
     }
//--- Source data layer: 4 features per historical bar, no activation
   CLayerDescription *descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronBase;
   descr.count = 4 * BarsToLine;
   descr.window = 0;
   descr.activation = ACT_None;
   descr.optimization = None;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Convolutional layer: window of 4 values (one bar) with step 4,
//--- 8 output filters, Swish activation, Adam optimizer
   descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronConv;
   descr.count = BarsToLine;
   descr.window = 4;
   descr.window_out = 8;
   descr.step = 4;
   descr.activation = ACT_SWISH;
   descr.optimization = Adam;
   descr.activation_params[0] = 1;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Sub-sampling (pooling) layer: average pooling, window 4, step 2
   descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronProof;
   descr.count = BarsToLine / 2;
   descr.window = 4;
   descr.window_out = 8;
   descr.step = 2;
   descr.activation = ACT_AVERAGE_POOLING;
   descr.optimization = None;
   descr.activation_params[0] = 1;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Result layer: a single linear neuron
   descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronBase;
   descr.count = 1;
   descr.activation = ACT_LINE;
   descr.optimization = Adam;
   descr.activation_params[0] = 1;
   descr.activation_params[1] = 0;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Initialize the neural network
   net = new CNet();
   if(CheckPointer(net) == POINTER_INVALID)
     {
      PrintFormat("Error of create Net: %d", GetLastError());
      delete layers;
      return;
     }
   if(!net.Create(layers, 3.0e-4, 0.9, 0.999, ENUM_LOSS_MSE, 0, 0))
     {
      PrintFormat("Error of init Net: %d", GetLastError());
      delete layers;
      delete net;
      return;
     }
   delete layers;   // layer descriptions are no longer needed once the net is built
   net.UseOpenCL(UseOpenCL);
   PrintFormat("Use OpenCL %s", (string)net.UseOpenCL());
//--- Open the study file and read one pattern of source data
   int handle = FileOpen(StudyFileName, FILE_READ | FILE_CSV | FILE_ANSI, ",", CP_UTF8);
   if(handle == INVALID_HANDLE)
     {
      PrintFormat("Error of open study data file: %d", GetLastError());
      delete net;
      return;
     }
//--- Buffer that receives the source data
   CArrayDouble *pattern = new CArrayDouble();
   if(CheckPointer(pattern) == POINTER_INVALID)
     {
      PrintFormat("Error of create Pattern data array: %d", GetLastError());
      FileClose(handle);   // fix: previously leaked the file handle on this path
      delete net;
      return;
     }
   if(!FileIsEnding(handle) && !IsStopped())
     {
      for(int i = 0; i < 4 * BarsToLine; i++)
        {
         if(!pattern.Add(FileReadNumber(handle)))
           {
            PrintFormat("Error of read study data from file: %d", GetLastError());
            FileClose(handle);   // fix: previously leaked the file handle on this path
            delete net;
            delete pattern;
            return;
           }
        }
     }
   FileClose(handle);
//--- Make sure a complete pattern was actually read (empty/short file guard)
   if(pattern.Total() < 4 * BarsToLine)
     {
      PrintFormat("Study data file contains less than %d values", 4 * BarsToLine);
      delete net;
      delete pattern;
      return;
     }
//--- Forward and backward pass to obtain the analytical gradients
   const double delta = 1.0e-5;   // perturbation step of the numerical differentiation
   double dd = 0;                 // accumulated deviation between the two methods
   CArrayDouble *init_pattern = new CArrayDouble();
   if(CheckPointer(init_pattern) == POINTER_INVALID)   // fix: pointer was not checked before use
     {
      PrintFormat("Error of create Pattern data array: %d", GetLastError());
      delete net;
      delete pattern;
      return;
     }
   init_pattern.AssignArray(pattern);   // keep an unmodified copy of the pattern
   if(!net.FeedForward(pattern))
     {
      PrintFormat("Error in FeedForward: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      return;
     }
   CBufferDouble *etalon_result = new CBufferDouble();
   if(CheckPointer(etalon_result) == POINTER_INVALID)   // fix: pointer was not checked before use
     {
      PrintFormat("Error of create Etalon result buffer: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      return;
     }
   if(!net.GetResults(etalon_result))
     {
      PrintFormat("Error in GetResult: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      return;
     }
//--- Buffer used both as target for backprop and to read probe results
   CBufferDouble *target = new CBufferDouble();
   if(CheckPointer(target) == POINTER_INVALID)
     {
      PrintFormat("Error of create Pattern Target array: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      return;
     }
//--- Shift the target by 'delta' and backpropagate: the analytical
//--- gradients collected below are therefore scaled by 'delta'
   target.AssignArray(etalon_result);
   target.Update(0, etalon_result.At(0) + delta);
   if(!net.Backpropagation(target))
     {
      PrintFormat("Error in Backpropagation: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
//--- Buffers exposed by the net (assumed to remain owned by the net,
//--- so they are never deleted here — TODO confirm ownership)
   CBufferDouble *input_gradient = net.GetGradient(0);
   CBufferDouble *weights = net.GetWeights(1);
   CBufferDouble *weights_gradient = net.GetDeltaWeights(1);
   if(CheckPointer(input_gradient) == POINTER_INVALID ||
      CheckPointer(weights) == POINTER_INVALID ||
      CheckPointer(weights_gradient) == POINTER_INVALID)   // fix: buffers were dereferenced unchecked
     {
      PrintFormat("Error of get net buffers: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
//--- Perturb every input element in turn and compare the central-difference
//--- estimate against the analytical gradient
   for(int k = 0; k < 4 * BarsToLine; k++)
     {
      pattern.AssignArray(init_pattern);
      pattern.Update(k, init_pattern.At(k) + delta);
      if(!net.FeedForward(pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      double d = target.At(0) - etalon_result.At(0);
      pattern.Update(k, init_pattern.At(k) - delta);
      if(!net.FeedForward(pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      d -= target.At(0) - etalon_result.At(0);
      d /= (2 * delta);                          // numerical derivative (central difference)
      dd += input_gradient.At(k) / delta - d;    // divide by delta: analytic gradient was produced
                                                 // by a target offset of delta (see backprop above)
     }
   delete pattern;
//--- Report the accumulated deviation at the level of the input data
   PrintFormat("Delta at input gradient between methods %.5e", dd);
//--- Reset the accumulator and repeat the check for the weight gradients
   dd = 0;
   CBufferDouble *initial_weights = new CBufferDouble();
   if(CheckPointer(initial_weights) == POINTER_INVALID)
     {
      PrintFormat("Error of create etalon weights buffer: %d", GetLastError());
      delete net;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
   if(!initial_weights.AssignArray(weights))
     {
      PrintFormat("Error of copy weights to initial weights buffer: %d", GetLastError());
      delete net;
      delete initial_weights;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
   for(int k = 0; k < weights.Total(); k++)
     {
      if(k > 0)
         weights.Update(k - 1, initial_weights.At(k - 1));   // restore the previously perturbed weight
      weights.Update(k, initial_weights.At(k) + delta);
      if(!net.FeedForward(init_pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete initial_weights;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete initial_weights;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      double d = target.At(0) - etalon_result.At(0);
      weights.Update(k, initial_weights.At(k) - delta);
      if(!net.FeedForward(init_pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete initial_weights;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete initial_weights;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      d -= target.At(0) - etalon_result.At(0);
      d /= (2 * delta);                          // numerical derivative (central difference)
      dd += weights_gradient.At(k) / delta - d;  // same delta scaling as the input-gradient check
     }
//--- fix: restore the last perturbed weight so the net is left intact
   int last = weights.Total() - 1;
   if(last >= 0)
      weights.Update(last, initial_weights.At(last));
//--- Report the accumulated deviation at the level of the weights
   PrintFormat("Delta at weights gradient between methods %.5e", dd);
//--- Free all memory before leaving the script
   delete init_pattern;
   delete etalon_result;
   delete initial_weights;
   delete target;
   delete net;
  }
//+------------------------------------------------------------------+