Original_NNB/MQL5/Scripts/NeuroNetworksBook/rnn/check_gradient_lstm.mq5

256 lines
17 KiB
MQL5
Raw Permalink Normal View History

2025-05-30 16:15:14 +02:00
//+------------------------------------------------------------------+
//| Check_Gradient_lstm.mq5 |
//| Copyright 2021, MetaQuotes Ltd. |
//| https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link "https://www.mql5.com"
#property version "1.00"
#property script_show_inputs
//+------------------------------------------------------------------+
//| External parameters for the script                               |
//+------------------------------------------------------------------+
input string StudyFileName = "study_data.csv";// File name with the training dataset
input int BarsToLine = 40; // Number of historical bars in one pattern
input bool UseOpenCL = true; // Use OpenCL
//+------------------------------------------------------------------+
//| Include the neural network library                               |
//+------------------------------------------------------------------+
#include <NeuroNetworksBook\realization\neuronnet.mqh>
CNet *net;
//+------------------------------------------------------------------+
//| Script entry point                                               |
//+------------------------------------------------------------------+
void OnStart()
{
//--- Gradient check: build a small net (input -> LSTM -> 1 linear output),
//--- read one data pattern from file, then compare the analytical input
//--- gradient from backpropagation against a central-difference estimate.
//--- Container for the layer descriptions of the network
CArrayObj *layers = new CArrayObj();
if(CheckPointer(layers) == POINTER_INVALID)
{
PrintFormat("Error of create CArrayObj: %d", GetLastError());
return;
}
//--- Source data layer: 4 values per bar, BarsToLine bars, no activation
CLayerDescription *descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
descr.type = defNeuronBase;
descr.count = 4 * BarsToLine;
descr.window = 0;
descr.activation = ACT_None;
descr.optimization = None;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
//--- Recurrent (LSTM) layer under test
descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
descr.type = defNeuronLSTM;
descr.count = BarsToLine;
descr.window_out = 2;
descr.activation = ACT_None;
descr.optimization = Adam;
descr.activation_params[0] = 1;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
//--- Results layer: a single neuron with linear activation (slope 1, offset 0)
descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
descr.type = defNeuronBase;
descr.count = 1;
descr.activation = ACT_LINE;
descr.optimization = Adam;
descr.activation_params[0] = 1;
descr.activation_params[1] = 0;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
//--- Initialize the neural network from the layer descriptions
net = new CNet();
if(CheckPointer(net) == POINTER_INVALID)
{
PrintFormat("Error of create Net: %d", GetLastError());
delete layers;
return;
}
//--- learning rate 3e-4, Adam betas 0.9/0.999, MSE loss
if(!net.Create(layers, 3.0e-4, 0.9, 0.999, ENUM_LOSS_MSE, 0, 0))
{
PrintFormat("Error of init Net: %d", GetLastError());
delete layers;
delete net;
return;
}
delete layers;
net.UseOpenCL(UseOpenCL);
PrintFormat("Use OpenCL %s", (string)net.UseOpenCL());
//--- Open the training-data file and read a single source-data pattern
int handle = FileOpen(StudyFileName, FILE_READ | FILE_CSV | FILE_ANSI, ",", CP_UTF8);
if(handle == INVALID_HANDLE)
{
PrintFormat("Error of open study data file: %d", GetLastError());
delete net;
return;
}
//--- Create a buffer to receive the source data
CArrayDouble *pattern = new CArrayDouble();
if(CheckPointer(pattern) == POINTER_INVALID)
{
PrintFormat("Error of create Pattern data array: %d", GetLastError());
delete net;
return;
}
if(!FileIsEnding(handle) && !IsStopped())
{
for(int i = 0; i < 4 * BarsToLine; i++)
{
if(!pattern.Add(FileReadNumber(handle)))
{
PrintFormat("Error of read study data from file: %d", GetLastError());
delete net;
delete pattern;
return;
}
}
}
FileClose(handle);
//--- Run one forward and one backward pass to obtain analytical gradients
const double delta = 1.0e-5;
double dd = 0;
//--- NOTE(review): no POINTER_INVALID check here, unlike every other
//--- allocation in this script — init_pattern should be checked too
CArrayDouble *init_pattern = new CArrayDouble();
init_pattern.AssignArray(pattern);
if(!net.FeedForward(pattern))
{
PrintFormat("Error in FeedForward: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
return;
}
CBufferDouble *etalon_result = new CBufferDouble();
if(!net.GetResults(etalon_result))
{
PrintFormat("Error in GetResult: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
return;
}
//--- Create a buffer for the target values
CBufferDouble *target = new CBufferDouble();
if(CheckPointer(target) == POINTER_INVALID)
{
PrintFormat("Error of create Pattern Target array: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
return;
}
//--- Make the target differ from the reference output by exactly delta,
//--- then backpropagate so the input gradient corresponds to that delta
target.AssignArray(etalon_result);
target.Update(0, etalon_result.At(0) + delta);
if(!net.Backpropagation(target))
{
PrintFormat("Error in Backpropagation: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
delete target;
return;
}
//--- NOTE(review): presumably these return pointers to buffers owned by the
//--- network (they are never deleted here) — TODO confirm ownership;
//--- weights and weights_gradient are fetched but never used below
CBufferDouble *input_gradient = net.GetGradient(0);
CBufferDouble *weights = net.GetWeights(1);
CBufferDouble *weights_gradient = net.GetDeltaWeights(1);
//--- Perturb each input element in turn and compare the empirical
//--- (central-difference) derivative with the analytical gradient
for(int k = 0; k < 4 * BarsToLine; k++)
{
pattern.AssignArray(init_pattern);
pattern.Update(k, init_pattern.At(k) + delta);
if(!net.FeedForward(pattern))
{
PrintFormat("Error in FeedForward: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
delete target;
return;
}
if(!net.GetResults(target))
{
PrintFormat("Error in GetResult: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
delete target;
return;
}
//--- output change for +delta perturbation of input k
double d = target.At(0) - etalon_result.At(0);
pattern.Update(k, init_pattern.At(k) - delta);
if(!net.FeedForward(pattern))
{
PrintFormat("Error in FeedForward: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
delete target;
return;
}
if(!net.GetResults(target))
{
PrintFormat("Error in GetResult: %d", GetLastError());
delete net;
delete pattern;
delete init_pattern;
delete etalon_result;
delete target;
return;
}
//--- central difference: (f(x+delta) - f(x-delta)) / (2*delta)
d -= target.At(0) - etalon_result.At(0);
d /= (2 * delta);
//--- accumulate deviation; analytical gradient is scaled by 1/delta —
//--- presumably because the backprop target offset above was delta itself
dd += input_gradient.At(k) / delta - d;
}
delete pattern;
//--- Log the total deviation between the two methods at the input level
PrintFormat("Delta at input gradient between methods %.5e", dd);
//--- Free memory before leaving the script
delete init_pattern;
delete etalon_result;
delete target;
//---
delete net;
}
//+------------------------------------------------------------------+