Original_NNB/MQL5/Scripts/NeuroNetworksBook/attention/attention_test.mq5

//+------------------------------------------------------------------+
//| Attention_test.mq5 |
//| Copyright 2021, MetaQuotes Ltd. |
//| https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link "https://www.mql5.com"
#property version "1.00"
#property script_show_inputs
//+------------------------------------------------------------------+
//| External script parameters |
//+------------------------------------------------------------------+
input string StudyFileName = "study_data.csv"; // File with the training dataset
input string OutputFileName = "loss_study_attention.csv"; // File for recording the error dynamics
input int BarsToLine = 40; // Number of historical bars in one pattern
input int NeuronsToBar = 4; // Number of input-layer neurons per bar
input bool UseOpenCL = false; // Use OpenCL
input int BatchSize = 10000; // Batch size for updating the weight matrix
input double LearningRate = 0.0001; // Learning rate
input int HiddenLayers = 3; // Number of hidden layers
input int HiddenLayer = 40; // Number of neurons in one hidden layer
input int Epochs = 1000; // Number of weight matrix update cycles
//+------------------------------------------------------------------+
//| Include the neural network library |
//+------------------------------------------------------------------+
#include <NeuroNetworksBook\realization\neuronnet.mqh>
CNet *net;
//+------------------------------------------------------------------+
//| Script program start |
//+------------------------------------------------------------------+
void OnStart()
{
//---
CArrayObj *layers = new CArrayObj();
if(CheckPointer(layers) == POINTER_INVALID)
{
PrintFormat("Error of create CArrayObj: %d", GetLastError());
return;
}
//--- Source data layer
CLayerDescription *descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
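// Describe the source data layer: a basic fully connected layer of
// NeuronsToBar * BarsToLine neurons without activation or optimizer,
// since it only receives the raw pattern values.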
descr.type = defNeuronBase;
int prev_count = descr.count = NeuronsToBar * BarsToLine;
descr.window = 0;
descr.activation = ACT_None;
descr.optimization = None;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete descr;
delete layers;
return;
}
//--- Attention layer
descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
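// Describe the Self-Attention layer: count is the number of sequence
// elements (bars), window is the size of one element's description vector
// (the values per bar, so it should match NeuronsToBar), and window_out is
// presumably the size of the internal Query/Key/Value vectors.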
descr.type = defNeuronAttention;
descr.count = BarsToLine;
descr.window = NeuronsToBar;
descr.window_out = 4;
descr.optimization = Adam;
descr.activation_params[0] = 1;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
//--- Hidden layers
descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
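// One description object is reused for all hidden layers: HiddenLayers
// identical fully connected layers of HiddenLayer neurons each, with Swish
// activation and the Adam optimizer.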
descr.type = defNeuronBase;
descr.count = HiddenLayer;
descr.activation = ACT_SWISH;
descr.optimization = Adam;
descr.activation_params[0] = 1;
for(int i = 0; i < HiddenLayers; i++)
{
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
}
//--- Results layer
descr = new CLayerDescription();
if(CheckPointer(descr) == POINTER_INVALID)
{
PrintFormat("Error of create CLayerDescription: %d", GetLastError());
delete layers;
return;
}
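// Describe the results layer: two neurons with linear activation, matching
// the two target values read for every pattern.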
descr.type = defNeuronBase;
descr.count = 2;
descr.activation = ACT_LINE;
descr.optimization = Adam;
descr.activation_params[0] = 1;
if(!layers.Add(descr))
{
PrintFormat("Error of add layer: %d", GetLastError());
delete layers;
delete descr;
return;
}
//---
net = new CNet();
if(CheckPointer(net) == POINTER_INVALID)
{
PrintFormat("Error of create Net: %d", GetLastError());
return;
}
//---
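// Create the network from the collected layer descriptions. Besides the
// learning rate, Create receives 0.9 and 0.999 (presumably the Adam beta1
// and beta2 coefficients), the MSE loss function and two parameters left
// at zero here.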
if(!net.Create(layers, LearningRate, 0.9, 0.999, ENUM_LOSS_MSE, 0, 0))
{
PrintFormat("Error of init Net: %d", GetLastError());
return;
}
delete layers;
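// Apply the user settings: the OpenCL usage flag and the smoothing period
// for the reported average loss.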
net.UseOpenCL(UseOpenCL);
net.LossSmoothFactor(BatchSize);
//---
CArrayObj *data = new CArrayObj();
if(CheckPointer(data) == POINTER_INVALID)
{
PrintFormat("Error of create Historical data array: %d", GetLastError());
return;
}
//---
CArrayObj *result = new CArrayObj();
if(CheckPointer(result) == POINTER_INVALID)
{
PrintFormat("Error of create Target data array: %d", GetLastError());
return;
}
CBufferDouble *pattern;
//---
CBufferDouble *target;
//--- Open the file and read the training data
int handle = FileOpen(StudyFileName, FILE_READ | FILE_CSV | FILE_ANSI | FILE_SHARE_READ, ",", CP_UTF8);
if(handle == INVALID_HANDLE)
{
PrintFormat("Error of open study data file: %d", GetLastError());
return;
}
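// Read the training set record by record: each pattern consists of
// NeuronsToBar * BarsToLine feature values followed by two target values.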
while(!FileIsEnding(handle) && !IsStopped())
{
pattern = new CBufferDouble();
if(CheckPointer(pattern) == POINTER_INVALID)
{
PrintFormat("Error of create Pattern data array: %d", GetLastError());
return;
}
target = new CBufferDouble();
if(CheckPointer(target) == POINTER_INVALID)
{
PrintFormat("Error of create Pattern Target array: %d", GetLastError());
return;
}
for(int i = 0; i < NeuronsToBar * BarsToLine; i++)
{
if(!pattern.Add(FileReadNumber(handle)))
{
PrintFormat("Error of read study data from file: %d", GetLastError());
return;
}
}
for(int i = 0; i < 2; i++)
if(!target.Add(FileReadNumber(handle)))
{
PrintFormat("Error of read study data from file: %d", GetLastError());
return;
}
if(!data.Add(pattern))
{
PrintFormat("Error of add study data to array: %d", GetLastError());
return;
}
if(!result.Add(target))
{
PrintFormat("Error of add study data to array: %d", GetLastError());
return;
}
int total = data.Total();
Comment(StringFormat("Read %d patterns", total));
}
FileClose(handle);
//---
const int patterns = data.Total();
CArrayDouble *loss = new CArrayDouble();
if(CheckPointer(loss) == POINTER_INVALID)
{
PrintFormat("Error of create loss array: %d", GetLastError());
return;
}
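// Training loop: every epoch feeds BatchSize randomly selected patterns
// through the network (feed-forward plus backpropagation), then updates the
// weight matrices once and stores the smoothed loss value.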
for(int epoch = 0; epoch < Epochs && !IsStopped(); epoch++)
{
for(int i = 0; i < BatchSize && !IsStopped(); i++)
{
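// Select a random pattern index in the range [0, patterns): the product of
// two MathRand() calls is scaled by 32767^2 to a fraction of the set size.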
int k = (int)((double)(MathRand() * MathRand()) / MathPow(32767.0, 2) * patterns);
pattern = data.At(k);
target = result.At(k);
if(CheckPointer(pattern) == POINTER_INVALID || CheckPointer(target) == POINTER_INVALID)
{
i--;
continue;
}
if(!net.FeedForward(pattern))
{
PrintFormat("Error in FeedForward: %d", GetLastError());
return;
}
if(!net.Backpropagation(target))
{
PrintFormat("Error in Backpropagation: %d", GetLastError());
return;
}
}
net.UpdateWeights(BatchSize);
if(!loss.Add(net.GetRecentAverageLoss()))
{
PrintFormat("Error of add loss to array: %d", GetLastError());
return;
}
string s;
Comment(s = StringFormat("Studied %d epoch, error %.5f", epoch, net.GetRecentAverageLoss()));
}
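// Write the recorded loss dynamics to a CSV file in the terminal's data folder.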
handle = FileOpen(OutputFileName, FILE_WRITE | FILE_CSV | FILE_ANSI, ",", CP_UTF8);
if(handle == INVALID_HANDLE)
{
PrintFormat("Error of open loss file: %d", GetLastError());
return;
}
for(int i = 0; i < loss.Total(); i++)
{
FileWrite(handle, loss.At(i));
}
FileClose(handle);
PrintFormat("The dynamics of the error change is saved to a file %s\\%s",
TerminalInfoString(TERMINAL_DATA_PATH), OutputFileName);
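// Save the trained network to a file and release the dynamically created objects.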
net.Save("Study.net");
delete net;
delete loss;
delete data;
delete result;
delete pattern;
delete target;
Comment("");
}
//+------------------------------------------------------------------+