//+------------------------------------------------------------------+
//|                                          Check_Gradient_lstm.mq5 |
//|                                  Copyright 2021, MetaQuotes Ltd. |
//|                                             https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link      "https://www.mql5.com"
#property version   "1.00"
#property script_show_inputs
//+------------------------------------------------------------------+
//| Script inputs                                                    |
//+------------------------------------------------------------------+
input string StudyFileName = "study_data.csv"; // Training data file name
input int    BarsToLine    = 40;               // Number of history bars in one pattern
input bool   UseOpenCL     = true;             // Use OpenCL
//+------------------------------------------------------------------+
//| Neural network library                                           |
//+------------------------------------------------------------------+
// TODO(review): the include target was lost in the source. Restore the
// actual path of the neural-network library header that declares CNet,
// CLayerDescription, CArrayDouble and CBufferDouble before compiling.
#include "neuronnet.mqh"

// Network instance under test (created and destroyed inside OnStart).
CNet *net;
//+------------------------------------------------------------------+
//| Script entry point.                                              |
//| Builds a 3-layer network (base input -> LSTM -> linear output),  |
//| reads one pattern from the study file, and compares the          |
//| analytical input gradient produced by backpropagation against an |
//| empirical central-difference gradient. The accumulated deviation |
//| is printed to the journal.                                       |
//+------------------------------------------------------------------+
void OnStart()
  {
//--- Container for the layer descriptions
   CArrayObj *layers = new CArrayObj();
   if(CheckPointer(layers) == POINTER_INVALID)
     {
      PrintFormat("Error of create CArrayObj: %d", GetLastError());
      return;
     }
//--- Input data layer: 4 values per bar, no activation
   CLayerDescription *descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronBase;
   descr.count = 4 * BarsToLine;
   descr.window = 0;
   descr.activation = ACT_None;
   descr.optimization = None;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Recurrent (LSTM) layer
   descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronLSTM;
   descr.count = BarsToLine;
   descr.window_out = 2;
   descr.activation = ACT_None;
   descr.optimization = Adam;
   descr.activation_params[0] = 1;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Results layer: single linear neuron
   descr = new CLayerDescription();
   if(CheckPointer(descr) == POINTER_INVALID)
     {
      PrintFormat("Error of create CLayerDescription: %d", GetLastError());
      delete layers;
      return;
     }
   descr.type = defNeuronBase;
   descr.count = 1;
   descr.activation = ACT_LINE;
   descr.optimization = Adam;
   descr.activation_params[0] = 1;
   descr.activation_params[1] = 0;
   if(!layers.Add(descr))
     {
      PrintFormat("Error of add layer: %d", GetLastError());
      delete layers;
      delete descr;
      return;
     }
//--- Initialize the neural network
   net = new CNet();
   if(CheckPointer(net) == POINTER_INVALID)
     {
      PrintFormat("Error of create Net: %d", GetLastError());
      delete layers;
      return;
     }
   if(!net.Create(layers, 3.0e-4, 0.9, 0.999, ENUM_LOSS_MSE, 0, 0))
     {
      PrintFormat("Error of init Net: %d", GetLastError());
      delete layers;
      delete net;
      return;
     }
   delete layers;
   net.UseOpenCL(UseOpenCL);
   PrintFormat("Use OpenCL %s", (string)net.UseOpenCL());
//--- Open the study file and read one input pattern
   int handle = FileOpen(StudyFileName, FILE_READ | FILE_CSV | FILE_ANSI, ",", CP_UTF8);
   if(handle == INVALID_HANDLE)
     {
      PrintFormat("Error of open study data file: %d", GetLastError());
      delete net;
      return;
     }
//--- Buffer for the input pattern
   CArrayDouble *pattern = new CArrayDouble();
   if(CheckPointer(pattern) == POINTER_INVALID)
     {
      PrintFormat("Error of create Pattern data array: %d", GetLastError());
      FileClose(handle);   // fix: the file handle leaked on this error path
      delete net;
      return;
     }
   if(!FileIsEnding(handle) && !IsStopped())
     {
      for(int i = 0; i < 4 * BarsToLine; i++)
        {
         if(!pattern.Add(FileReadNumber(handle)))
           {
            PrintFormat("Error of read study data from file: %d", GetLastError());
            FileClose(handle);   // fix: the file handle leaked on this error path
            delete net;
            delete pattern;
            return;
           }
        }
     }
   FileClose(handle);
//--- Forward and backward pass to obtain analytical gradients
   const double delta = 1.0e-5;
   double dd = 0;
   CArrayDouble *init_pattern = new CArrayDouble();
   if(CheckPointer(init_pattern) == POINTER_INVALID)   // fix: allocation was used unchecked
     {
      PrintFormat("Error of create Pattern data array: %d", GetLastError());
      delete net;
      delete pattern;
      return;
     }
   init_pattern.AssignArray(pattern);
   if(!net.FeedForward(pattern))
     {
      PrintFormat("Error in FeedForward: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      return;
     }
   CBufferDouble *etalon_result = new CBufferDouble();
   if(CheckPointer(etalon_result) == POINTER_INVALID)   // fix: allocation was used unchecked
     {
      PrintFormat("Error of create Pattern Target array: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      return;
     }
   if(!net.GetResults(etalon_result))
     {
      PrintFormat("Error in GetResult: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      return;
     }
//--- Buffer for the perturbed target / per-pass results
   CBufferDouble *target = new CBufferDouble();
   if(CheckPointer(target) == POINTER_INVALID)
     {
      PrintFormat("Error of create Pattern Target array: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      return;
     }
//--- Shift the target by delta and backpropagate to get analytical gradients
   target.AssignArray(etalon_result);
   target.Update(0, etalon_result.At(0) + delta);
   if(!net.Backpropagation(target))
     {
      PrintFormat("Error in Backpropagation: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
//--- Analytical gradient at the input layer (owned by the network —
//    presumably not to be deleted here; confirm CNet ownership rules).
   CBufferDouble *input_gradient = net.GetGradient(0);
   if(CheckPointer(input_gradient) == POINTER_INVALID)   // fix: was dereferenced unchecked
     {
      PrintFormat("Error in Backpropagation: %d", GetLastError());
      delete net;
      delete pattern;
      delete init_pattern;
      delete etalon_result;
      delete target;
      return;
     }
// NOTE(review): the original also fetched net.GetWeights(1) and
// net.GetDeltaWeights(1) into locals that were never used (and never
// freed); the dead calls were removed.
//--- Perturb each input element in turn and compare the empirical
//    central-difference gradient with the analytical one.
   for(int k = 0; k < 4 * BarsToLine; k++)
     {
      pattern.AssignArray(init_pattern);
      pattern.Update(k, init_pattern.At(k) + delta);
      if(!net.FeedForward(pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      double d = target.At(0) - etalon_result.At(0);
      pattern.Update(k, init_pattern.At(k) - delta);
      if(!net.FeedForward(pattern))
        {
         PrintFormat("Error in FeedForward: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      if(!net.GetResults(target))
        {
         PrintFormat("Error in GetResult: %d", GetLastError());
         delete net;
         delete pattern;
         delete init_pattern;
         delete etalon_result;
         delete target;
         return;
        }
      //--- d = (f(x+delta) - f(x-delta)) / (2*delta)
      d -= target.At(0) - etalon_result.At(0);
      d /= (2 * delta);
      // NOTE(review): deviations are summed signed, so positive and
      // negative errors can cancel; the /delta scaling depends on the
      // CNet loss implementation — confirm against the library.
      dd += input_gradient.At(k) / delta - d;
     }
   delete pattern;
//--- Report the accumulated deviation at the input-data level
   PrintFormat("Delta at input gradient between methods %.5e", dd);
//--- Release remaining buffers before leaving the script
   delete init_pattern;
   delete etalon_result;
   delete target;
//---
   delete net;
  }
//+------------------------------------------------------------------+