Original_NNB/MQL5/Include/NeuroNetworksBook/realization/neuronnet.mqh

//+------------------------------------------------------------------+
//| NeuronNet.mqh |
//| Copyright 2021, MetaQuotes Ltd. |
//| https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link "https://www.mql5.com"
//+------------------------------------------------------------------+
//| Include libraries |
//+------------------------------------------------------------------+
#include "arraylayers.mqh"
#include "positionencoder.mqh"
#include "lossfunction.mqh"
//+------------------------------------------------------------------+
//| Class CNet |
//| Purpose: base dispatcher class that organizes the operation |
//| of the neural network |
//+------------------------------------------------------------------+
class CNet : public CObject
{
protected:
bool m_bTrainMode;
CArrayLayers *m_cLayers;
CMyOpenCL *m_cOpenCL;
bool m_bOpenCL;
double m_dNNLoss;
int m_iLossSmoothFactor;
CPositionEncoder *m_cPositionEncoder;
bool m_bPositionEncoder;
CLossFunction *m_cLossFunction;
double m_adLambda[2];
double m_dLearningRate;
double m_adBeta[2];
public:
CNet(void);
~CNet(void);
//--- Object creation methods
bool Create(CArrayObj *descriptions);
bool Create(CArrayObj *descriptions, double learning_rate,
double beta1, double beta2);
bool Create(CArrayObj *descriptions, ENUM_LOSS_FUNCTION loss_function,
double lambda1, double lambda2);
bool Create(CArrayObj *descriptions, double learning_rate, double beta1,
double beta2, ENUM_LOSS_FUNCTION loss_function, double lambda1,
double lambda2);
//--- Working with OpenCL
void UseOpenCL(bool value);
bool UseOpenCL(void) const { return(m_bOpenCL); }
bool InitOpenCL(void);
//--- Positional encoding methods
void UsePositionEncoder(bool value);
bool UsePositionEncoder(void) const { return(m_bPositionEncoder); }
//--- Main model operation algorithms
bool FeedForward(CArrayDouble *inputs);
bool Backpropagation(CBufferDouble *target);
bool UpdateWeights(uint batch_size = 1);
bool GetResults(CBufferDouble *&result);
void SetLearningRates(double learning_rate, double beta1 = 0.9,
double beta2 = 0.999);
//--- Loss function methods
bool LossFunction(ENUM_LOSS_FUNCTION loss_function, double lambda1 = 0,
double lambda2 = 0);
ENUM_LOSS_FUNCTION LossFunction(void) const { return(m_cLossFunction.LossFunction());}
ENUM_LOSS_FUNCTION LossFunction(double &lambda1, double &lambda2);
double GetRecentAverageLoss(void) const { return(m_dNNLoss); }
void LossSmoothFactor(int value) { m_iLossSmoothFactor = value;}
int LossSmoothFactor(void) const { return(m_iLossSmoothFactor);}
//--- Model operation mode control
bool TrainMode(void) const { return m_bTrainMode; }
void TrainMode(bool mode);
//--- File operations
virtual bool Save(string file_name = NULL);
virtual bool Save(const int file_handle);
virtual bool Load(string file_name = NULL, bool common = false);
virtual bool Load(const int file_handle);
//--- Object identification method
virtual int Type(void) const { return(defNeuronNet); }
//--- Getting pointers to internal objects
virtual CBufferDouble *GetGradient(uint layer) const;
virtual CBufferDouble *GetWeights(uint layer) const;
virtual CBufferDouble *GetDeltaWeights(uint layer) const;
virtual int GetGPTUnits(void);
};
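//--- Usage sketch (illustrative only): one training iteration with CNet.
//--- "descriptions", "pattern", "target" and "batch_size" are assumed to be
//--- prepared by the calling program; LOSS_MSE is the standard MQL5 loss enum.
//---    CNet net;
//---    if(!net.Create(descriptions, 3.0e-4, 0.9, 0.999, LOSS_MSE, 0, 0))
//---       return false;
//---    if(!net.FeedForward(pattern))        // pattern : CArrayDouble *
//---       return false;
//---    if(!net.Backpropagation(target))     // target  : CBufferDouble *
//---       return false;
//---    if(!net.UpdateWeights(batch_size))   // e.g. once per mini-batch
//---       return false;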
//+------------------------------------------------------------------+
//| Class constructor |
//+------------------------------------------------------------------+
CNet::CNet(void) : m_bTrainMode(false),
m_bOpenCL(false),
m_bPositionEncoder(false),
m_dNNLoss(-1),
m_iLossSmoothFactor(1000),
m_dLearningRate(3.0e-4)
{
ArrayInitialize(m_adLambda, 0);
ArrayInitialize(m_adBeta, 0);
m_cLayers = new CArrayLayers();
m_cOpenCL = new CMyOpenCL();
m_cPositionEncoder = new CPositionEncoder();
m_cLossFunction = new CLossFunction();
}
//+------------------------------------------------------------------+
//| Class destructor |
//+------------------------------------------------------------------+
CNet::~CNet(void)
{
if(CheckPointer(m_cLayers) != POINTER_INVALID)
delete m_cLayers;
if(CheckPointer(m_cOpenCL) != POINTER_INVALID)
delete m_cOpenCL;
if(CheckPointer(m_cPositionEncoder) != POINTER_INVALID)
delete m_cPositionEncoder;
if(CheckPointer(m_cLossFunction) != POINTER_INVALID)
delete m_cLossFunction;
}
//+------------------------------------------------------------------+
//| Class initialization method |
//+------------------------------------------------------------------+
bool CNet::Create(CArrayObj *descriptions)
{
//--- Control block
if(CheckPointer(descriptions) == POINTER_INVALID)
return false;
//--- Check the number of layers to create
int total = descriptions.Total();
if(total < 2)
return false;
//--- Initialize the OpenCL objects
if(m_bOpenCL)
m_bOpenCL = InitOpenCL();
if(!m_cLayers.SetOpencl(m_cOpenCL))
m_bOpenCL = false;
//--- Loop to create the neural layers
for(int i = 0; i < total; i++)
{
CLayerDescription *temp = descriptions.At(i);
if(CheckPointer(temp) == POINTER_INVALID)
return false;
if(i == 0)
{
if(temp.type != defNeuronBase)
return false;
temp.window = 0;
}
else
{
CLayerDescription *prev = descriptions.At(i - 1);
if(temp.window <= 0 || temp.window > prev.count || temp.type == defNeuronBase)
{
switch(prev.type)
{
case defNeuronConv:
case defNeuronProof:
temp.window = prev.count * prev.window_out;
break;
default:
temp.window = prev.count;
break;
}
switch(temp.type)
{
case defNeuronAttention:
case defNeuronMHAttention:
case defNeuronGPT:
break;
default:
temp.step = 0;
}
}
}
if(!m_cLayers.CreateElement(i, temp))
return false;
}
//--- Initialize the positional encoding objects
if(m_bPositionEncoder)
{
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID)
{
m_cPositionEncoder = new CPositionEncoder();
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID)
{
m_bPositionEncoder = false;
return true;
}
}
CLayerDescription *temp = descriptions.At(0);
if(!m_cPositionEncoder.InitEncoder(temp.count, temp.window))
UsePositionEncoder(false);
}
//---
return true;
}
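//--- A minimal sketch of preparing the "descriptions" array for Create()
//--- (illustrative; only the CLayerDescription fields referenced above are
//--- shown, and the count values are arbitrary examples):
//---    CArrayObj *descriptions = new CArrayObj();
//---    CLayerDescription *input = new CLayerDescription();
//---    input.type  = defNeuronBase;   // the first layer must be defNeuronBase
//---    input.count = 40;              // number of input neurons
//---    descriptions.Add(input);
//---    CLayerDescription *dense = new CLayerDescription();
//---    dense.type  = defNeuronBase;   // fully connected layer; its window
//---    dense.count = 20;              // is derived from the previous layer
//---    descriptions.Add(dense);
//---    CNet net;
//---    if(!net.Create(descriptions))
//---       return false;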
//+------------------------------------------------------------------+
//| Class initialization method |
//+------------------------------------------------------------------+
bool CNet::Create(CArrayObj *descriptions,
double learning_rate,
double beta1, double beta2,
ENUM_LOSS_FUNCTION loss_function,
double lambda1, double lambda2)
{
if(!Create(descriptions))
return false;
SetLearningRates(learning_rate, beta1, beta2);
if(!LossFunction(loss_function, lambda1, lambda2))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Class initialization method |
//+------------------------------------------------------------------+
bool CNet::Create(CArrayObj *descriptions,
ENUM_LOSS_FUNCTION loss_function,
double lambda1, double lambda2)
{
if(!Create(descriptions))
return false;
if(!LossFunction(loss_function, lambda1, lambda2))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Class initialization method |
//+------------------------------------------------------------------+
bool CNet::Create(CArrayObj *descriptions,
double learning_rate,
double beta1, double beta2)
{
if(!Create(descriptions))
return false;
SetLearningRates(learning_rate, beta1, beta2);
//---
return true;
}
//+------------------------------------------------------------------+
//| Feed-forward method |
//+------------------------------------------------------------------+
bool CNet::FeedForward(CArrayDouble *inputs)
{
//--- Control block
if(CheckPointer(inputs) == POINTER_INVALID)
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
CNeuronBase *InputLayer = m_cLayers.At(0);
if(CheckPointer(InputLayer) == POINTER_INVALID)
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
CBufferDouble *Inputs = InputLayer.GetOutputs();
if(CheckPointer(Inputs) == POINTER_INVALID)
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
if(Inputs.Total() != inputs.Total())
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
//--- Copy the source data into the neural layer
if(!Inputs.AssignArray(inputs))
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
//--- Apply positional encoding
if(m_bPositionEncoder && !m_cPositionEncoder.AddEncoder(Inputs))
{
PrintFormat("%s - %d", __FUNCTION__, __LINE__);
return false;
}
if(m_bOpenCL)
Inputs.BufferCreate(m_cOpenCL);
//--- Loop through all neural layers
//--- and call the feed-forward method for each of them
CNeuronBase *PrevLayer = InputLayer;
int total = m_cLayers.Total();
for(int i = 1; i < total; i++)
{
CNeuronBase *Layer = m_cLayers.At(i);
if(CheckPointer(Layer) == POINTER_INVALID)
{
PrintFormat("%s - %d Layer %d", __FUNCTION__, __LINE__, i);
return false;
}
if(!Layer.FeedForward(PrevLayer))
{
PrintFormat("%s - %d Layer %d", __FUNCTION__, __LINE__, i);
return false;
}
PrevLayer = Layer;
}
//---
return true;
}
//+------------------------------------------------------------------+
//| Backpropagation method |
//+------------------------------------------------------------------+
bool CNet::Backpropagation(CBufferDouble *target)
{
//--- Control block
if(CheckPointer(target) == POINTER_INVALID)
return false;
int total = m_cLayers.Total();
CNeuronBase *Output = m_cLayers.At(total - 1);
if(CheckPointer(Output) == POINTER_INVALID)
return false;
//--- Calculate the loss function value
double loss = m_cLossFunction.CaclFunction(Output.GetOutputs(), target);
if(loss == DBL_MAX)
return false;
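//--- Exponentially smoothed average of the loss: each new value is blended
//--- in with weight 1/m_iLossSmoothFactor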
m_dNNLoss = (m_dNNLoss < 0 ? loss : m_dNNLoss + (loss - m_dNNLoss) / m_iLossSmoothFactor);
//--- Calculate the error gradient at the output of the neural network
if(!Output.CalcOutputGradient(target))
{
return false;
}
//--- Loop through all neural layers in reverse order
for(int i = total - 2; i >= 0; i--)
{
CNeuronBase *temp = m_cLayers.At(i);
if(CheckPointer(temp) == POINTER_INVALID)
return false;
//--- Call the method that propagates the error gradient through the hidden layer
if(!Output.CalcHiddenGradient(temp))
return false;
//--- Call the method that propagates the error gradient to the weight matrix
if(!Output.CalcDeltaWeights(temp))
return false;
Output = temp;
}
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for updating the weight matrices |
//+------------------------------------------------------------------+
bool CNet::UpdateWeights(uint batch_size = 1)
{
//--- Control block
if(batch_size <= 0)
return false;
//--- Loop through all hidden layers
int total = m_cLayers.Total();
for(int i = 1; i < total; i++)
{
//--- Check the validity of the pointer to the neural layer object
CNeuronBase *temp = m_cLayers.At(i);
if(CheckPointer(temp) == POINTER_INVALID)
return false;
//--- Call the weight matrix update method of the layer
if(!temp.UpdateWeights(batch_size, m_dLearningRate, m_adBeta, m_adLambda))
return false;
}
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for getting the feed-forward results |
//+------------------------------------------------------------------+
bool CNet::GetResults(CBufferDouble *&result)
{
int total = m_cLayers.Total();
CNeuronBase *temp = m_cLayers.At(total - 1);
if(CheckPointer(temp) == POINTER_INVALID)
return false;
if(CheckPointer(result) == POINTER_INVALID)
{
result = new CBufferDouble();
if(CheckPointer(result) == POINTER_INVALID)
return false;
}
if(!result.AssignArray(temp.GetOutputs()))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for saving class elements to a file |
//+------------------------------------------------------------------+
bool CNet::Save(string file_name = NULL)
{
//--- Control block
if(file_name == NULL || file_name == "")
file_name = defFileName;
//--- Open the file for writing
int handle = FileOpen(file_name, FILE_WRITE | FILE_BIN);
//--- Call the class save method using the file handle
bool result = Save(handle);
//--- Close the opened file
FileClose(handle);
//---
return result;
}
//+------------------------------------------------------------------+
//| Method for saving class elements to a file |
//+------------------------------------------------------------------+
bool CNet::Save(const int file_handle)
{
//--- Control block
if(file_handle == INVALID_HANDLE || CheckPointer(m_cLossFunction) == POINTER_INVALID ||
CheckPointer(m_cLayers) == POINTER_INVALID)
return false;
//--- Save the constants
if(!FileWriteInteger(file_handle, (int)m_bOpenCL) ||
!FileWriteDouble(file_handle, m_dNNLoss) ||
!FileWriteInteger(file_handle, m_iLossSmoothFactor) ||
!FileWriteInteger(file_handle, (int)m_bPositionEncoder) ||
!FileWriteDouble(file_handle, m_dLearningRate) ||
!FileWriteArray(file_handle, m_adBeta, 0) ||
!FileWriteArray(file_handle, m_adLambda, 0) ||
!FileWriteInteger(file_handle, (int)m_cLossFunction.LossFunction()))
return false;
//--- Save the positional encoding object if required
if(m_bPositionEncoder)
{
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID ||
!m_cPositionEncoder.Save(file_handle))
return false;
}
//--- Call the save method of the dynamic array of neural layers
return m_cLayers.Save(file_handle);
}
//+------------------------------------------------------------------+
//| Method for restoring the class from saved data |
//+------------------------------------------------------------------+
bool CNet::Load(string file_name = NULL, bool common = false)
{
//--- Control block
string path = TerminalInfoString(TERMINAL_COMMONDATA_PATH);
if(!FileIsExist(file_name, (common ? FILE_COMMON : 0)))
file_name = defFileName;
//--- Open the file and call the data loading method with the file handle
int handle = FileOpen(file_name, FILE_READ | FILE_BIN | FILE_SHARE_READ | (common ? FILE_COMMON : 0));
bool result = Load(handle);
FileClose(handle);
//---
return result;
}
//+------------------------------------------------------------------+
//| Method for restoring the class from saved data |
//+------------------------------------------------------------------+
bool CNet::Load(const int file_handle)
{
//--- Control block
if(file_handle == INVALID_HANDLE)
return false;
//--- Read the constants
m_bOpenCL = (bool)FileReadInteger(file_handle);
m_dNNLoss = FileReadDouble(file_handle);
m_iLossSmoothFactor = FileReadInteger(file_handle);
m_bPositionEncoder = (bool)FileReadInteger(file_handle);
m_dLearningRate = FileReadDouble(file_handle);
if(FileReadArray(file_handle, m_adBeta, 0) < 2 ||
FileReadArray(file_handle, m_adLambda, 0) < 2)
return false;
ENUM_LOSS_FUNCTION loss = (ENUM_LOSS_FUNCTION) FileReadInteger(file_handle);
//--- Load the positional encoding object
if(m_bPositionEncoder)
{
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID)
{
m_cPositionEncoder = new CPositionEncoder();
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID)
return false;
}
if(!m_cPositionEncoder.Load(file_handle))
return false;
}
//--- Initialize the loss function object
if(CheckPointer(m_cLossFunction) == POINTER_INVALID)
{
m_cLossFunction = new CLossFunction();
if(CheckPointer(m_cLossFunction) == POINTER_INVALID)
return false;
}
m_cLossFunction.LossFunction(loss);
//--- Initialize the OpenCL object
if(m_bOpenCL)
{
if(!InitOpenCL())
m_bOpenCL = false;
}
else
if(CheckPointer(m_cOpenCL) != POINTER_INVALID)
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
}
//--- Initialize and load the dynamic array of neural layers
if(CheckPointer(m_cLayers) != POINTER_INVALID)
delete m_cLayers;
m_cLayers = new CArrayLayers(file_handle);
if(CheckPointer(m_cLayers) == POINTER_INVALID)
return false;
if(m_bOpenCL)
m_cLayers.SetOpencl(m_cOpenCL);
//---
return m_cLayers.Load(file_handle);
}
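//--- Persistence sketch (illustrative; the file name is a placeholder):
//---    CNet net;
//---    ... create and train the model ...
//---    if(!net.Save("model.nnw"))
//---       Print("Error saving the model");
//---    CNet restored;
//---    if(!restored.Load("model.nnw"))
//---       Print("Error loading the model");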
//+------------------------------------------------------------------+
//| Method for initializing the OpenCL objects |
//+------------------------------------------------------------------+
bool CNet::InitOpenCL(void)
{
//--- Delete previously created OpenCL objects
if(CheckPointer(m_cOpenCL) != POINTER_INVALID)
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
}
//--- Create a new object for working with OpenCL
m_cOpenCL = new CMyOpenCL();
if(CheckPointer(m_cOpenCL) == POINTER_INVALID)
return false;
//--- Initialize the OpenCL object
if(!m_cOpenCL.Initialize(cl_program, true))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.SetKernelsCount(29))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.SetBuffersCount(10))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
//--- Initialize the OpenCL kernels
if(!m_cOpenCL.KernelCreate(def_k_PerceptronFeedForward, "PerceptronFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_Normalize, "Normalize"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_CalcOutputGradient, "CalcOutputGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_DeActivateGradient, "DeActivateGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_CalcHiddenGradient, "CalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_CalcDeltaWeights, "CalcDeltaWeights"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_SGDUpdate, "SGDUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_MomentumUpdate, "MomentumUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AdaGradUpdate, "AdaGradUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_RMSPropUpdate, "RMSPropUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AdaDeltaUpdate, "AdaDeltaUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AdamUpdate, "AdamUpdate"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_ProofFeedForward, "ProofFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_ProofHiddenGradients, "ProofCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_ConvolutionFeedForward, "ConvolutionFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_ConvolutionHiddenGradients, "ConvolutionCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_ConvolutionDeltaWeights, "ConcolutionCalcDeltaWeights"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_LSTMFeedForward, "LSTMFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_LSTMHiddenGradients, "LSTMCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AttentionFeedForward, "AttentionFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AttentionScoreGradients, "AttentionCalcScoreGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_AttentionHiddenGradients, "AttentionCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_GPTFeedForward, "GPTFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_GPTScoreGradients, "GPTCalcScoreGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_GPTHiddenGradients, "GPTCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_BatchNormFeedForward, "BatchNormFeedForward"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_BatchNormCalcHiddenGradient, "BatchNormCalcHiddenGradient"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_BatchNormCalcDeltaWeights, "BatchNormCalcDeltaWeights"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
if(!m_cOpenCL.KernelCreate(def_k_MaskMult, "MaskMult"))
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
return false;
}
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for passing the OpenCL object pointer to all |
//| internal objects |
//+------------------------------------------------------------------+
void CNet::UseOpenCL(bool value)
{
if(!value)
{
if(CheckPointer(m_cOpenCL) == POINTER_INVALID)
{
m_bOpenCL = value;
return;
}
m_cOpenCL.Shutdown();
delete m_cOpenCL;
if(CheckPointer(m_cLayers) != POINTER_INVALID)
m_cLayers.SetOpencl(m_cOpenCL);
m_bOpenCL = value;
return;
}
//---
if(CheckPointer(m_cOpenCL) != POINTER_INVALID)
{
m_cOpenCL.Shutdown();
delete m_cOpenCL;
}
m_bOpenCL = InitOpenCL();
if(CheckPointer(m_cLayers) != POINTER_INVALID)
m_cLayers.SetOpencl(m_cOpenCL);
return;
}
//+------------------------------------------------------------------+
//| Method for setting the training parameters |
//+------------------------------------------------------------------+
void CNet::SetLearningRates(double learning_rate, double beta1 = 0.900000, double beta2 = 0.999000)
{
m_dLearningRate = learning_rate;
m_adBeta[0] = beta1;
m_adBeta[1] = beta2;
}
//+------------------------------------------------------------------+
//| Method for setting the loss function |
//+------------------------------------------------------------------+
bool CNet::LossFunction(ENUM_LOSS_FUNCTION loss_function, double lambda1 = 0.000000, double lambda2 = 0.000000)
{
if(CheckPointer(m_cLossFunction) == POINTER_INVALID)
{
m_cLossFunction = new CLossFunction();
if(CheckPointer(m_cLossFunction) == POINTER_INVALID)
return false;
}
m_cLossFunction.LossFunction(loss_function);
m_adLambda[0] = lambda1;
m_adLambda[1] = lambda2;
return true;
}
//+------------------------------------------------------------------+
//| Method for getting a pointer to the gradient buffer by layer index |
//+------------------------------------------------------------------+
CBufferDouble *CNet::GetGradient(uint layer) const
{
if(layer >= (uint)m_cLayers.Total())
return NULL;
//---
CNeuronBase *l = m_cLayers.At(layer);
return l.GetGradients();
}
//+------------------------------------------------------------------+
//| Method for getting a pointer to the weight matrix by layer index |
//+------------------------------------------------------------------+
CBufferDouble *CNet::GetWeights(uint layer) const
{
if(layer >= (uint)m_cLayers.Total())
return NULL;
//---
CNeuronBase *l = m_cLayers.At(layer);
return l.GetWeights();
}
//+------------------------------------------------------------------+
//| Method for getting a pointer to the buffer of accumulated error |
//| gradients of the weight matrix by layer index |
//+------------------------------------------------------------------+
CBufferDouble *CNet::GetDeltaWeights(uint layer)const
{
if(layer >= (uint)m_cLayers.Total())
return NULL;
//---
CNeuronBase *l = m_cLayers.At(layer);
return l.GetDeltaWeights();
}
//+------------------------------------------------------------------+
//| Setting the model operation mode |
//+------------------------------------------------------------------+
void CNet::TrainMode(bool mode)
{
m_bTrainMode = mode;
int total = m_cLayers.Total();
for(int i = 0; i < total; i++)
{
if(CheckPointer(m_cLayers.At(i)) == POINTER_INVALID)
continue;
CNeuronBase *temp = m_cLayers.At(i);
temp.TrainMode(mode);
}
}
//+------------------------------------------------------------------+
//| Method for getting the depth of the GPT blocks in use |
//+------------------------------------------------------------------+
int CNet::GetGPTUnits(void)
{
int result = 0;
if(CheckPointer(m_cLayers) == POINTER_INVALID)
return result;
int total = m_cLayers.Total();
for(int i = 0; i < total; i++)
{
if(CheckPointer(m_cLayers.At(i)) == POINTER_INVALID)
continue;
if(m_cLayers.At(i).Type() == defNeuronGPT)
{
CNeuronGPT *temp = m_cLayers.At(i);
result += temp.GetUnits() * temp.GetLayers();
}
if(m_cLayers.At(i).Type() == defNeuronLSTM)
{
CNeuronLSTM *temp = m_cLayers.At(i);
result += temp.GetDepth();
}
}
//---
return result;
}
//+------------------------------------------------------------------+
//| Method for setting the positional encoding usage flag |
//+------------------------------------------------------------------+
void CNet::UsePositionEncoder(bool value)
{
m_bPositionEncoder = value;
if(!m_bPositionEncoder)
{
if(CheckPointer(m_cPositionEncoder) != POINTER_INVALID)
delete m_cPositionEncoder;
return;
}
//---
if(CheckPointer(m_cPositionEncoder) == POINTER_INVALID)
m_cPositionEncoder = new CPositionEncoder();
if(CheckPointer(m_cLayers) == POINTER_INVALID || m_cLayers.Total() < 1)
return;
CNeuronBase *temp = m_cLayers.At(0);
if(CheckPointer(temp) == POINTER_INVALID)
return;
if(!m_cPositionEncoder.InitEncoder(1, temp.GetOutputs().Total()))
UsePositionEncoder(false);
//---
return;
}
//+------------------------------------------------------------------+