//+------------------------------------------------------------------+
//|                                              NeuronAttention.mqh |
//|                                  Copyright 2021, MetaQuotes Ltd. |
//|                                             https://www.mql5.com |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, MetaQuotes Ltd."
#property link "https://www.mql5.com"
//+------------------------------------------------------------------+
//| Include libraries                                                |
//+------------------------------------------------------------------+
#include "defines.mqh"
#include "neuronconv.mqh"
#include <Math\Stat\Math.mqh>
//+------------------------------------------------------------------+
//| Class CNeuronAttention                                           |
//| Purpose: Self-Attention block class                              |
//+------------------------------------------------------------------+
class CNeuronAttention : public CNeuronBase
{
protected:
CNeuronConv *m_cQuerys;
CNeuronConv *m_cKeys;
CNeuronConv *m_cValues;
CBufferDouble *m_cScores;
CBufferDouble *m_cScoreGrad;
CBufferDouble *m_cScoreTemp;
CNeuronBase *m_cAttentionOut;
CNeuronConv *m_cFF1;
CNeuronConv *m_cFF2;
//---
int m_iWindow;
int m_iUnits;
int m_iKeysSize;
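//--- standard deviations of the two normalization steps, saved during the
//--- feed-forward pass and reused to scale gradients in CalcHiddenGradient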
double m_dStd[2];
public:
CNeuronAttention(void);
~CNeuronAttention(void);
//---
virtual bool Init(CLayerDescription *description);
virtual bool SetOpenCL(CMyOpenCL *opencl);
virtual bool FeedForward(CNeuronBase *prevLayer);
virtual bool CalcHiddenGradient(CNeuronBase *prevLayer);
virtual bool CalcDeltaWeights(CNeuronBase *prevLayer);
virtual bool UpdateWeights(int batch_size, double learningRate,
double &Beta[], double &Lambda[]);
//--- File handling methods
virtual bool Save(const int file_handle);
virtual bool Load(const int file_handle);
//--- Object identification method
virtual int Type(void) const { return(defNeuronAttention); }
};
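//--- Usage sketch (illustrative only, not part of the library): the layer is
//--- described with a CLayerDescription object whose fields correspond to the
//--- checks in CNeuronAttention::Init; the numeric values below are assumed.
//--- The optimization method of the description is passed to the inner layers.
//    CLayerDescription *desc = new CLayerDescription();
//    desc.type       = defNeuronAttention;  // value returned by Type()
//    desc.count      = 20;                  // number of sequence elements
//    desc.window     = 5;                   // size of one element's vector
//    desc.window_out = 8;                   // size of the internal Query/Key vectors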
//+------------------------------------------------------------------+
//| Class constructor                                                |
//+------------------------------------------------------------------+
CNeuronAttention::CNeuronAttention(void) : m_iWindow(1),
m_iUnits(0),
m_iKeysSize(1)
{
m_cQuerys = new CNeuronConv;
m_cKeys = new CNeuronConv;
m_cValues = new CNeuronConv;
m_cScores = new CBufferDouble;
m_cAttentionOut = new CNeuronBase();
m_cFF1 = new CNeuronConv;
m_cFF2 = new CNeuronConv;
ArrayInitialize(m_dStd, 1);
}
//+------------------------------------------------------------------+
//| Class destructor                                                 |
//+------------------------------------------------------------------+
CNeuronAttention::~CNeuronAttention(void)
{
if(CheckPointer(m_cQuerys) != POINTER_INVALID)
delete m_cQuerys;
if(CheckPointer(m_cKeys) != POINTER_INVALID)
delete m_cKeys;
if(CheckPointer(m_cValues) != POINTER_INVALID)
delete m_cValues;
if(CheckPointer(m_cScores) != POINTER_INVALID)
delete m_cScores;
if(CheckPointer(m_cScoreGrad) != POINTER_INVALID)
delete m_cScoreGrad;
if(CheckPointer(m_cScoreTemp) != POINTER_INVALID)
delete m_cScoreTemp;
if(CheckPointer(m_cAttentionOut) != POINTER_INVALID)
delete m_cAttentionOut;
if(CheckPointer(m_cFF1) != POINTER_INVALID)
delete m_cFF1;
if(CheckPointer(m_cFF2) != POINTER_INVALID)
delete m_cFF2;
}
//+------------------------------------------------------------------+
//| Class initialization method                                      |
//+------------------------------------------------------------------+
bool CNeuronAttention::Init(CLayerDescription *description)
{
//--- Validate the input data
if(CheckPointer(description) == POINTER_INVALID || description.type != Type() ||
description.count <= 0 || description.window <= 0 || description.window_out <= 0)
return false;
//---
m_iWindow = description.window;
m_iUnits = description.count;
m_iKeysSize = description.window_out;
//--- Create a description for the internal neural layers
CLayerDescription *temp = new CLayerDescription();
if(CheckPointer(temp) == POINTER_INVALID)
return false;
temp.type = defNeuronConv;
temp.window = description.window;
temp.window_out = m_iKeysSize;
temp.step = description.window;
temp.count = description.count;
temp.activation = ACT_None;
temp.activation_params[0] = 1;
temp.activation_params[1] = 0;
temp.optimization = description.optimization;
//--- Call the parent class initialization method
description.count *= description.window;
description.window_out = 1;
description.window = 0;
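//--- the parent layer is sized to count*window elements and treated as a
//--- plain data buffer (window = 0, window_out = 1)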
if(!CNeuronBase::Init(description))
{
delete temp;
return false;
}
//--- Initialize Querys
if(CheckPointer(m_cQuerys) == POINTER_INVALID)
{
m_cQuerys = new CNeuronConv();
if(CheckPointer(m_cQuerys) == POINTER_INVALID)
{
delete temp;
return false;
}
}
if(!m_cQuerys.Init(temp))
{
delete temp;
return false;
}
m_cQuerys.SetTransposedOutput(true);
//--- Initialize Keys
if(CheckPointer(m_cKeys) == POINTER_INVALID)
{
m_cKeys = new CNeuronConv();
if(CheckPointer(m_cKeys) == POINTER_INVALID)
{
delete temp;
return false;
}
}
if(!m_cKeys.Init(temp))
{
delete temp;
return false;
}
m_cKeys.SetTransposedOutput(true);
//--- Initialize Values
if(CheckPointer(m_cValues) == POINTER_INVALID)
{
m_cValues = new CNeuronConv();
if(CheckPointer(m_cValues) == POINTER_INVALID)
{
delete temp;
return false;
}
}
temp.window_out = m_iWindow;
if(!m_cValues.Init(temp))
{
delete temp;
return false;
}
m_cValues.SetTransposedOutput(true);
//--- Initialize Scores
if(CheckPointer(m_cScores) == POINTER_INVALID)
{
m_cScores = new CBufferDouble();
if(CheckPointer(m_cScores) == POINTER_INVALID)
{
delete temp;
return false;
}
}
if(!m_cScores.BufferInit(temp.count * temp.count, 0))
{
delete temp;
return false;
}
//--- Initialize AttentionOut
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
{
m_cAttentionOut = new CNeuronBase();
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
{
delete temp;
return false;
}
}
description.type = defNeuronBase;
if(!m_cAttentionOut.Init(description))
{
delete temp;
return false;
}
//--- Initialize FF1
if(CheckPointer(m_cFF1) == POINTER_INVALID)
{
m_cFF1 = new CNeuronConv();
if(CheckPointer(m_cFF1) == POINTER_INVALID)
{
delete temp;
return false;
}
}
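//--- FF1 expands every element to 4*window values with Swish activation,
//--- mirroring the 4x expansion of the Transformer feed-forward block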
temp.window_out *= 4;
temp.activation = ACT_SWISH;
temp.activation_params[0] = 1;
temp.activation_params[1] = 0;
if(!m_cFF1.Init(temp))
{
delete temp;
return false;
}
m_cFF1.SetTransposedOutput(true);
//--- Initialize FF2
if(CheckPointer(m_cFF2) == POINTER_INVALID)
{
m_cFF2 = new CNeuronConv();
if(CheckPointer(m_cFF2) == POINTER_INVALID)
{
delete temp;
return false;
}
}
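//--- FF2 projects the expanded 4*window output of FF1 back to vectors of
//--- the original window size; the description fields are swapped below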
temp.window = temp.window_out;
temp.window_out = temp.step;
temp.step = temp.window;
temp.activation = ACT_None;//description.activation;
temp.activation_params[0] = 1;//description.activation_params[0];
temp.activation_params[1] = 0;//description.activation_params[1];
if(!m_cFF2.Init(temp))
{
delete temp;
return false;
}
m_cFF2.SetTransposedOutput(true);
delete temp;
//--- To avoid copying buffers, substitute them with the FF2 buffers
if(CheckPointer(m_cOutputs) != POINTER_INVALID)
delete m_cOutputs;
m_cOutputs = m_cFF2.GetOutputs();
if(CheckPointer(m_cGradients) != POINTER_INVALID)
delete m_cGradients;
m_cGradients = m_cFF2.GetGradients();
//--- Pass the pointer to the OpenCL object to all internal objects
SetOpenCL(m_cOpenCL);
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for passing the OpenCL object pointer to all internal     |
//| objects of the class                                             |
//+------------------------------------------------------------------+
bool CNeuronAttention::SetOpenCL(CMyOpenCL *opencl)
{
CNeuronBase::SetOpenCL(opencl);
if(CheckPointer(m_cQuerys) != POINTER_INVALID)
m_cQuerys.SetOpenCL(m_cOpenCL);
if(CheckPointer(m_cKeys) != POINTER_INVALID)
m_cKeys.SetOpenCL(m_cOpenCL);
if(CheckPointer(m_cValues) != POINTER_INVALID)
m_cValues.SetOpenCL(m_cOpenCL);
if(CheckPointer(m_cFF1) != POINTER_INVALID)
m_cFF1.SetOpenCL(m_cOpenCL);
if(CheckPointer(m_cFF2) != POINTER_INVALID)
m_cFF2.SetOpenCL(m_cOpenCL);
//---
return(CheckPointer(m_cOpenCL) != POINTER_INVALID);
}
//+------------------------------------------------------------------+
//| Feed-forward method                                              |
//+------------------------------------------------------------------+
bool CNeuronAttention::FeedForward(CNeuronBase *prevLayer)
{
//--- Check the validity of all objects
if(CheckPointer(prevLayer) == POINTER_INVALID ||
CheckPointer(prevLayer.GetOutputs()) == POINTER_INVALID ||
CheckPointer(m_cQuerys) == POINTER_INVALID ||
CheckPointer(m_cValues) == POINTER_INVALID ||
CheckPointer(m_cKeys) == POINTER_INVALID ||
CheckPointer(m_cFF1) == POINTER_INVALID ||
CheckPointer(m_cFF2) == POINTER_INVALID)
return false;
//---
if(!m_cQuerys.FeedForward(prevLayer))
return false;
if(!m_cKeys.FeedForward(prevLayer))
return false;
if(!m_cValues.FeedForward(prevLayer))
return false;
//--- Initialize Scores
if(CheckPointer(m_cScores) == POINTER_INVALID)
{
m_cScores = new CBufferDouble();
if(CheckPointer(m_cScores) == POINTER_INVALID)
return false;
}
//--- Initialize AttentionOut
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
{
m_cAttentionOut = new CNeuronBase();
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
return false;
CLayerDescription *temp = new CLayerDescription();
if(CheckPointer(temp) == POINTER_INVALID)
return false;
temp.type = defNeuronBase;
temp.count = m_cOutputs.Total();
temp.window = 0;
if(!m_cAttentionOut.Init(temp))
{
delete temp;
return false;
}
delete temp;
}
//--- Branch the algorithm by computing device
double summs[];
int total = m_cOutputs.Total();
if(CheckPointer(m_cOpenCL) == POINTER_INVALID)
{
CBufferDouble *querys = m_cQuerys.GetOutputs();
CBufferDouble *keys = m_cKeys.GetOutputs();
//--- Calculate Scores
double scores[];
if(ArrayResize(scores, m_iUnits * m_iUnits) <= 0 ||
ArrayResize(summs, m_iUnits) <= 0)
return false;
for(int query = 0; query < m_iUnits; query++)
{
summs[query] = 0;
for(int key = 0; key < m_iUnits; key++)
{
int shift = query * m_iUnits + key;
scores[shift] = 0;
for(int i = 0; i < m_iKeysSize; i++)
scores[shift] += querys.At(query * m_iKeysSize + i) * keys.At(key * m_iKeysSize + i);
scores[shift] = MathExp(scores[shift] / MathSqrt(m_iKeysSize));
summs[query] += scores[shift];
}
}
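//--- together with the normalization below this gives the row-wise
//--- softmax of Q*K'/sqrt(m_iKeysSize) from the Self-Attention formula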
//--- Normalize Scores
for(int query = 0; query < m_iUnits; query++)
{
for(int key = 0; key < m_iUnits; key++)
scores[query * m_iUnits + key] /= summs[query];
}
if(!m_cScores.AssignArray(scores))
return false;
//--- Attention block output
if(ArrayResize(summs, total) < total)
return false;
if(ArrayInitialize(summs, 0) < total)
return false;
CBufferDouble *values = m_cValues.GetOutputs();
for(int value = 0; value < m_iUnits; value++)
{
for(int pos = 0; pos < m_iWindow; pos++)
{
double val = values.At(value * m_iWindow + pos);
for(int query = 0; query < m_iUnits; query++)
summs[query * m_iWindow + pos] += val * scores[query * m_iUnits + value];
}
}
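//--- summs now holds Score * Values: for every query position, the weighted
//--- sum of the value vectors of all sequence elements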
//--- Add the input data and normalize
double mean = 0;
CBufferDouble *prev = prevLayer.GetOutputs();
for(int i = 0; i < total; i++)
{
summs[i] += prev.At(i);
mean += summs[i];
}
mean /= total;
m_dStd[0] = MathStandardDeviation(summs);
for(int i = 0; i < total; i++)
summs[i] = (summs[i] - mean) / m_dStd[0];
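//--- the standard deviation of this first normalization is kept in m_dStd[0]
//--- to rescale the error gradient in CalcHiddenGradient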
if(!m_cAttentionOut.GetOutputs().AssignArray(summs))
return false;
}
else // OpenCL block
{
//--- Create data buffers
if(m_cQuerys.GetOutputs().GetIndex() < 0 && !m_cQuerys.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cKeys.GetOutputs().GetIndex() < 0 && !m_cKeys.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cValues.GetOutputs().GetIndex() < 0 && !m_cValues.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cScores.GetIndex() < 0 && !m_cScores.BufferCreate(m_cOpenCL))
return false;
if(m_cAttentionOut.GetOutputs().GetIndex() < 0 && !m_cAttentionOut.GetOutputs().BufferCreate(m_cOpenCL))
return false;
//--- Pass parameters to the kernel
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionFeedForward, def_attff_keys, m_cKeys.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionFeedForward, def_attff_outputs, m_cAttentionOut.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionFeedForward, def_attff_querys, m_cQuerys.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionFeedForward, def_attff_scores, m_cScores.GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionFeedForward, def_attff_values, m_cValues.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgument(def_k_AttentionFeedForward, def_attff_key_size, m_iKeysSize))
return false;
if(!m_cOpenCL.SetArgument(def_k_AttentionFeedForward, def_attff_window, m_iWindow))
return false;
if(!m_cOpenCL.SetArgument(def_k_AttentionFeedForward, def_attff_mask, 0))
return false;
//--- Enqueue the kernel for execution
int off_set[] = {0,0};
int NDRange[] = {m_iUnits,1};
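//--- one work-item per sequence element; the second dimension is unused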
if(!m_cOpenCL.Execute(def_k_AttentionFeedForward, 2, off_set, NDRange))
return false;
//--- Read the operation results
if(!m_cAttentionOut.GetOutputs().GetData(summs, true))
return false;
if(!m_cScores.BufferRead())
return false;
m_cQuerys.GetOutputs().BufferFree();
m_cKeys.GetOutputs().BufferFree();
m_cValues.GetOutputs().BufferFree();
m_cScores.BufferFree();
prevLayer.GetOutputs().BufferFree();
//--- Add the input data and normalize
double mean = 0;
CBufferDouble *prev = prevLayer.GetOutputs();
for(int i = 0; i < total; i++)
{
summs[i] += prev.At(i);
mean += summs[i];
}
mean /= total;
m_dStd[0] = MathStandardDeviation(summs);
for(int i = 0; i < total; i++)
summs[i] = (summs[i] - mean) / m_dStd[0];
if(!m_cAttentionOut.GetOutputs().AssignArray(summs))
return false;
if(!m_cAttentionOut.GetOutputs().BufferWrite())
m_cAttentionOut.GetOutputs().BufferFree();
}
//--- Call the feed-forward methods of the Feed Forward block layers
if(!m_cFF1.FeedForward(m_cAttentionOut))
return false;
if(!m_cFF2.FeedForward(m_cFF1))
return false;
//--- Add the attention output and normalize
double mean = 0;
for(int i = 0; i < total; i++)
{
summs[i] += m_cOutputs.At(i);
mean += summs[i];
}
mean /= total;
m_dStd[1] = MathStandardDeviation(summs);
for(int i = 0; i < total; i++)
summs[i] = (summs[i] - mean) / m_dStd[1];
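//--- m_dStd[1] keeps the standard deviation of this second normalization
//--- for the backward pass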
if(!m_cOutputs.AssignArray(summs))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for propagating the gradient through the hidden layer     |
//+------------------------------------------------------------------+
bool CNeuronAttention::CalcHiddenGradient(CNeuronBase *prevLayer)
{
//--- Check the validity of all objects
if(CheckPointer(m_cOutputs) == POINTER_INVALID ||
CheckPointer(m_cGradients) == POINTER_INVALID ||
CheckPointer(m_cScores) == POINTER_INVALID ||
CheckPointer(m_cFF2) == POINTER_INVALID ||
CheckPointer(m_cQuerys) == POINTER_INVALID ||
CheckPointer(m_cKeys) == POINTER_INVALID ||
CheckPointer(m_cValues) == POINTER_INVALID ||
m_cOutputs.Total() != m_cGradients.Total())
return false;
//--- Scale the gradient by the standard deviation of the second normalization
if(m_dStd[1]!=0 && m_cGradients.Scaling(1/m_dStd[1])<=0)
return false;
//--- Propagate the gradient through the Feed Forward block layers
if(!m_cFF2.CalcHiddenGradient(m_cFF1))
return false;
if(!m_cFF1.CalcHiddenGradient(m_cAttentionOut))
return false;
CBufferDouble *attention_grad = m_cAttentionOut.GetGradients();
int total=m_cOutputs.Total();
if(!attention_grad.SumArray(m_cGradients))
return false;
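//--- adjust the gradient for the first normalization (divide by the saved
//--- standard deviation)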
if(m_dStd[0]!=0 && attention_grad.Scaling(1/m_dStd[0])<=0)
return false;
//--- Branch the algorithm by computing device
if(CheckPointer(m_cOpenCL) == POINTER_INVALID)
{
double values[];
double gradients[];
if(ArrayResize(values, total) < total || attention_grad.GetData(gradients, false) < total)
return false;
if(ArrayInitialize(values, 0) < total)
return false;
//--- Distribute the gradient to Values
for(int value = 0; value < m_iUnits; value++)
{
for(int grad = 0; grad < m_iUnits; grad++)
{
double score = m_cScores.At(grad * m_iUnits + value);
for(int i = 0; i < m_iWindow; i++)
values[value * m_iWindow + i] += gradients[grad * m_iWindow + i] * score;
}
}
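//--- values[] now holds the gradient of each value vector: the sum of the
//--- output gradients weighted by the corresponding attention scores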
if(!m_cValues.GetGradients().AssignArray(values))
return false;
//--- Distribute the gradient to Querys and Keys
if(m_cValues.GetOutputs().GetData(values, false) <= 0)
return false;
double querys[], querys_grad[];
double keys[], keys_grad[];
int keys_total = m_iUnits * m_iKeysSize;
if(m_cQuerys.GetOutputs().GetData(querys, false) < keys_total)
return false;
if(m_cKeys.GetOutputs().GetData(keys, false) < keys_total)
return false;
if(ArrayResize(querys_grad, keys_total) <= 0 || ArrayResize(keys_grad, keys_total) <= 0)
return false;
if(ArrayInitialize(querys_grad, 0) <= 0 || ArrayInitialize(keys_grad, 0) <= 0)
return false;
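//--- for every query row: first collect the gradient on the normalized scores,
//--- then pass it through the softmax Jacobian S_i*((i==k)-S_k), scale it by
//--- 1/sqrt(m_iKeysSize) and distribute it to the Query and Key vectors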
for(int q = 0; q < m_iUnits; q++)
{
double score_grad[];
if(ArrayResize(score_grad, m_iUnits) <= 0)
return false;
if(ArrayInitialize(score_grad, 0) <= 0)
return false;
for(int k = 0; k < m_iUnits; k++)
{
for(int i = 0; i < m_iWindow; i++)
score_grad[k] += gradients[q * m_iWindow + i] * values[k * m_iWindow + i];
}
for(int k = 0; k < m_iUnits; k++)
{
double score = m_cScores.At(q * m_iUnits + k);
double grad = 0;
for(int i = 0; i < m_iUnits; i++)
grad += m_cScores.At(q * m_iUnits + i) * ((int)(i == k) - score) * score_grad[i];
grad /= MathSqrt(m_iKeysSize);
//---
for(int i = 0; i < m_iKeysSize; i++)
{
querys_grad[q * m_iKeysSize + i] += grad * keys[k * m_iKeysSize + i];
keys_grad[k * m_iKeysSize + i ] += grad * querys[q * m_iKeysSize + i];
}
}
}
if(!m_cQuerys.GetGradients().AssignArray(querys_grad) || !m_cKeys.GetGradients().AssignArray(keys_grad))
return false;
}
else // OpenCL block
{
//--- Create data buffers
if(m_cValues.GetOutputs().GetIndex() < 0 && !m_cValues.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cValues.GetGradients().GetIndex() < 0 && !m_cValues.GetGradients().BufferCreate(m_cOpenCL))
return false;
if(m_cScores.GetIndex() < 0 && !m_cScores.BufferCreate(m_cOpenCL))
return false;
if(m_cAttentionOut.GetGradients().GetIndex() < 0 && !m_cAttentionOut.GetGradients().BufferCreate(m_cOpenCL))
return false;
if(CheckPointer(m_cScoreGrad) == POINTER_INVALID)
{
m_cScoreGrad = new CBufferDouble();
if(CheckPointer(m_cScoreGrad) == POINTER_INVALID)
return false;
}
if(m_cScoreGrad.Total() != m_cScores.Total())
if(!m_cScoreGrad.BufferInit(m_cScores.Total(), 0))
return false;
if(m_cScoreGrad.GetIndex() < 0 && !m_cScoreGrad.BufferCreate(m_cOpenCL))
return false;
//---
if(CheckPointer(m_cScoreTemp) == POINTER_INVALID)
{
m_cScoreTemp = new CBufferDouble();
if(CheckPointer(m_cScoreTemp) == POINTER_INVALID)
return false;
}
if(m_cScoreTemp.Total() != m_cScores.Total())
if(!m_cScoreTemp.BufferInit(m_cScores.Total(), 0))
return false;
if(m_cScoreTemp.GetIndex() < 0 && !m_cScoreTemp.BufferCreate(m_cOpenCL))
return false;
//--- Pass parameters to the kernel
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_outputs_grad, m_cAttentionOut.GetGradients().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_scores, m_cScores.GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_scores_grad, m_cScoreGrad.GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_scores_temp, m_cScoreTemp.GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_values, m_cValues.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionScoreGradients, def_attscr_values_grad, m_cValues.GetGradients().GetIndex()))
return false;
if(!m_cOpenCL.SetArgument(def_k_AttentionScoreGradients, def_attscr_window, m_iWindow))
return false;
//--- Enqueue the kernel for execution
int off_set[] = {0,0};
int NDRange[] = {m_iUnits,1};
if(!m_cOpenCL.Execute(def_k_AttentionScoreGradients, 2, off_set, NDRange))
return false;
//--- Read the results
if(!m_cValues.GetGradients().BufferRead())
return false;
m_cValues.GetOutputs().BufferFree();
m_cScores.BufferFree();
m_cScoreTemp.BufferFree();
m_cAttentionOut.GetOutputs().BufferFree();
//---
if(m_cQuerys.GetOutputs().GetIndex() < 0 && !m_cQuerys.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cQuerys.GetGradients().GetIndex() < 0 && !m_cQuerys.GetGradients().BufferCreate(m_cOpenCL))
return false;
if(m_cKeys.GetOutputs().GetIndex() < 0 && !m_cKeys.GetOutputs().BufferCreate(m_cOpenCL))
return false;
if(m_cKeys.GetGradients().GetIndex() < 0 && !m_cKeys.GetGradients().BufferCreate(m_cOpenCL))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionHiddenGradients, def_atthgr_keys, m_cKeys.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionHiddenGradients, def_atthgr_keys_grad, m_cKeys.GetGradients().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionHiddenGradients, def_atthgr_querys, m_cQuerys.GetOutputs().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionHiddenGradients, def_atthgr_querys_grad, m_cQuerys.GetGradients().GetIndex()))
return false;
if(!m_cOpenCL.SetArgumentBuffer(def_k_AttentionHiddenGradients, def_atthgr_scores_grad, m_cScoreGrad.GetIndex()))
return false;
if(!m_cOpenCL.SetArgument(def_k_AttentionHiddenGradients, def_atthgr_key_size, m_iKeysSize))
return false;
if(!m_cOpenCL.Execute(def_k_AttentionHiddenGradients, 2, off_set, NDRange))
return false;
//--- Read the results
if(!m_cQuerys.GetGradients().BufferRead() ||
!m_cKeys.GetGradients().BufferRead())
return false;
//---
m_cScoreGrad.BufferFree();
}
//--- Propagate the error gradient to the previous layer
if(!m_cValues.CalcHiddenGradient(prevLayer))
return false;
if(!attention_grad.SumArray(prevLayer.GetGradients()))
return false;
if(!m_cQuerys.CalcHiddenGradient(prevLayer))
return false;
if(!attention_grad.SumArray(prevLayer.GetGradients()))
return false;
if(!m_cKeys.CalcHiddenGradient(prevLayer))
return false;
if(!prevLayer.GetGradients().SumArray(attention_grad))
return false;
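//--- the previous layer's gradient now combines the contributions of the
//--- Values, Querys and Keys branches plus the residual gradient of the
//--- attention block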
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for distributing the gradient to the weight matrices      |
//+------------------------------------------------------------------+
bool CNeuronAttention::CalcDeltaWeights(CNeuronBase *prevLayer)
{
if(CheckPointer(m_cFF2)==POINTER_INVALID)
return false;
if(!m_cFF2.CalcDeltaWeights(m_cFF1))
return false;
if(!m_cFF1.CalcDeltaWeights(m_cAttentionOut))
return false;
if(CheckPointer(m_cQuerys)==POINTER_INVALID)
return false;
if(!m_cQuerys.CalcDeltaWeights(prevLayer))
return false;
if(CheckPointer(m_cKeys)==POINTER_INVALID)
return false;
if(!m_cKeys.CalcDeltaWeights(prevLayer))
return false;
if(CheckPointer(m_cValues)==POINTER_INVALID)
return false;
if(!m_cValues.CalcDeltaWeights(prevLayer))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for updating the weight matrices                          |
//+------------------------------------------------------------------+
bool CNeuronAttention::UpdateWeights(int batch_size, double learningRate, double &Beta[], double &Lambda[])
{
if(CheckPointer(m_cQuerys)==POINTER_INVALID)
return false;
if(!m_cQuerys.UpdateWeights(batch_size, learningRate, Beta, Lambda))
return false;
if(CheckPointer(m_cKeys)==POINTER_INVALID)
return false;
if(!m_cKeys.UpdateWeights(batch_size, learningRate, Beta, Lambda))
return false;
if(CheckPointer(m_cValues)==POINTER_INVALID)
return false;
if(!m_cValues.UpdateWeights(batch_size, learningRate, Beta, Lambda))
return false;
if(CheckPointer(m_cFF1)==POINTER_INVALID)
return false;
if(!m_cFF1.UpdateWeights(batch_size, learningRate, Beta, Lambda))
return false;
if(CheckPointer(m_cFF2)==POINTER_INVALID)
return false;
if(!m_cFF2.UpdateWeights(batch_size, learningRate, Beta, Lambda))
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for saving the class contents to a file                   |
//+------------------------------------------------------------------+
bool CNeuronAttention::Save(const int file_handle)
{
if(!CNeuronBase::Save(file_handle))
return false;
if(CheckPointer(m_cQuerys) == POINTER_INVALID)
return false;
if(!m_cQuerys.Save(file_handle))
return false;
if(CheckPointer(m_cKeys) == POINTER_INVALID)
return false;
if(!m_cKeys.Save(file_handle))
return false;
if(CheckPointer(m_cValues) == POINTER_INVALID)
return false;
if(!m_cValues.Save(file_handle))
return false;
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
return false;
if(!m_cAttentionOut.Save(file_handle))
return false;
if(CheckPointer(m_cFF1) == POINTER_INVALID)
return false;
if(!m_cFF1.Save(file_handle))
return false;
if(CheckPointer(m_cFF2) == POINTER_INVALID)
return false;
if(!m_cFF2.Save(file_handle))
return false;
if(FileWriteInteger(file_handle, m_iUnits) <= 0)
return false;
if(FileWriteInteger(file_handle, m_iWindow) <= 0)
return false;
if(FileWriteInteger(file_handle, m_iKeysSize) <= 0)
return false;
//---
return true;
}
//+------------------------------------------------------------------+
//| Method for restoring the class from a file                       |
//+------------------------------------------------------------------+
bool CNeuronAttention::Load(const int file_handle)
{
if(!CNeuronBase::Load(file_handle))
return false;
if(CheckPointer(m_cQuerys) == POINTER_INVALID)
{
m_cQuerys = new CNeuronConv();
if(CheckPointer(m_cQuerys) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronConv || !m_cQuerys.Load(file_handle))
return false;
if(CheckPointer(m_cKeys) == POINTER_INVALID)
{
m_cKeys = new CNeuronConv();
if(CheckPointer(m_cKeys) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronConv || !m_cKeys.Load(file_handle))
return false;
if(CheckPointer(m_cValues) == POINTER_INVALID)
{
m_cValues = new CNeuronConv();
if(CheckPointer(m_cValues) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronConv || !m_cValues.Load(file_handle))
return false;
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
{
m_cAttentionOut = new CNeuronBase();
if(CheckPointer(m_cAttentionOut) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronBase || !m_cAttentionOut.Load(file_handle))
return false;
if(CheckPointer(m_cFF1) == POINTER_INVALID)
{
m_cFF1 = new CNeuronConv();
if(CheckPointer(m_cFF1) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronConv || !m_cFF1.Load(file_handle))
return false;
if(CheckPointer(m_cFF2) == POINTER_INVALID)
{
m_cFF2 = new CNeuronConv();
if(CheckPointer(m_cFF2) == POINTER_INVALID)
return false;
}
if(FileReadInteger(file_handle)!=defNeuronConv || !m_cFF2.Load(file_handle))
return false;
m_iUnits = FileReadInteger(file_handle);
int scores = m_iUnits * m_iUnits;
m_iWindow = FileReadInteger(file_handle);
m_iKeysSize = FileReadInteger(file_handle);
if(CheckPointer(m_cScores) == POINTER_INVALID)
{
m_cScores = new CBufferDouble();
if(CheckPointer(m_cScores) == POINTER_INVALID)
return false;
}
if(!m_cScores.BufferInit(scores, 0))
return false;
//---
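//--- restore the buffer substitution: the layer's output and gradient buffers
//--- must point to the FF2 buffers, exactly as arranged in Init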
if(m_cFF2.GetOutputs() != m_cOutputs)
{
if(CheckPointer(m_cOutputs) != POINTER_INVALID)
delete m_cOutputs;
m_cOutputs = m_cFF2.GetOutputs();
}
//---
if(m_cFF2.GetGradients() != m_cGradients)
{
if(CheckPointer(m_cGradients) != POINTER_INVALID)
delete m_cGradients;
m_cGradients = m_cFF2.GetGradients();
}
//---
return true;
}
//+------------------------------------------------------------------+