// (removed page-scrape artifacts: "210 lignes / 16 Kio / MQL5" repeated twice)
//+------------------------------------------------------------------+
//|                                                        Study.mq5 |
//|                                                   Copyright DNG® |
//|                                https://www.mql5.com/ru/users/dng |
//+------------------------------------------------------------------+
#property copyright "Copyright DNG®"
#property link      "https://www.mql5.com/ru/users/dng"
#property version   "1.00"
//+------------------------------------------------------------------+
//|                                                                  |
//+------------------------------------------------------------------+
#define Study
#include "Trajectory.mqh"
//+------------------------------------------------------------------+
//| Input parameters                                                 |
//+------------------------------------------------------------------+
// Number of training iterations performed by Train()
// (note: the 1e7 double literal is implicitly converted to int)
input int                  Iterations = 1e7;
//+------------------------------------------------------------------+
//|                                                                  |
//+------------------------------------------------------------------+
STrajectory                Buffer[];      // replay buffer of recorded trajectories (filled by LoadTotalBase)
CNet                       Value;         // value-function network being trained
//---
float                      dError;        // training error accumulator — not referenced in this file, TODO confirm use
datetime                   dtStudied;     // timestamp of the last study session (read back on model load)
//---
CBufferFloat               State;         // scratch buffer: concatenated state history fed to the network
CBufferFloat              *Result;        // network output buffer (obtained via getResults, freed in OnDeinit)
//+------------------------------------------------------------------+
//| Expert initialization function                                   |
//| Loads the training set and the value model (creating a fresh one |
//| if no saved model exists), validates the model output size and   |
//| schedules training via a custom chart event.                     |
//+------------------------------------------------------------------+
int OnInit()
  {
//--- load the pre-collected trajectory base; training is impossible without it
   ResetLastError();
   if(!LoadTotalBase())
     {
      PrintFormat("Error of load study data: %d", GetLastError());
      return INIT_FAILED;
     }
//--- load models
   float temp;
   if(!Value.Load(FileName + "Val.nnw", temp, temp, temp, dtStudied, true))
     {
      //--- no saved model: build a new one from the architecture description
      CArrayObj *value = new CArrayObj();
      if(!value)  // 'new' returns NULL on allocation failure in MQL5
        {
         PrintFormat("Error of create descriptions array: %d", GetLastError());
         return INIT_FAILED;
        }
      if(!CreateValueDescriptions(value))
        {
         delete value;
         return INIT_FAILED;
        }
      if(!Value.Create(value))
        {
         delete value;
         return INIT_FAILED;
        }
      delete value;
      //---
     }
//--- sanity check: model output width must match the reward vector size
   Value.getResults(Result);
   if(Result.Total() != NRewards)
     {
      PrintFormat("The scope of the Value does not match the rewards count (%d <> %d)", NRewards, Result.Total());
      return INIT_FAILED;
     }
//--- defer training to OnChartEvent so OnInit can return promptly
   if(!EventChartCustom(ChartID(), 1, 0, 0, "Init"))
     {
      PrintFormat("Error of create study event: %d", GetLastError());
      return INIT_FAILED;
     }
//---
   return(INIT_SUCCEEDED);
  }
//+------------------------------------------------------------------+
//| Expert deinitialization function                                 |
//| Persists the trained model and releases the results buffer.      |
//+------------------------------------------------------------------+
void OnDeinit(const int reason)
  {
//--- save the trained model; report failure instead of losing it silently
//--- (original code ignored the Save() return value)
   if(!Value.Save(FileName + "Val.nnw", 0, 0, 0, TimeCurrent(), true))
      PrintFormat("Error of save Value model: %d", GetLastError());
//--- guard the delete: deleting an invalid pointer raises a runtime error
   if(!!Result)
      delete Result;
  }
//+------------------------------------------------------------------+
//| ChartEvent function                                              |
//| Receives the custom "Init" event posted by OnInit and starts the |
//| (long-running) training loop outside of OnInit.                  |
//+------------------------------------------------------------------+
void OnChartEvent(const int id,
                  const long &lparam,
                  const double &dparam,
                  const string &sparam)
  {
//--- custom event ids arrive offset by CHARTEVENT_CUSTOM (1000), so the
//--- event posted with id 1 is delivered as 1001; spell that out instead
//--- of the magic number 1001
   if(id == CHARTEVENT_CUSTOM + 1)
      Train();
  }
//+------------------------------------------------------------------+
//| Train function                                                   |
//| Trains the value network on randomly sampled trajectory slices:  |
//| feeds ValueBars consecutive states forward, builds a discounted  |
//| reward target and back-propagates a conflict-averse gradient.    |
//+------------------------------------------------------------------+
void Train(void)
  {
   int total_tr = ArraySize(Buffer);
   uint ticks = GetTickCount();
   int check = 0;
//---
   bool StopFlag = false;
   for(int iter = 0; (iter < Iterations && !IsStopped() && !StopFlag); iter ++)
     {
      //--- sample a random trajectory and a random bar offset inside it
      int tr = (int)((MathRand() / 32767.0) * (total_tr - 1));
      int i = (int)((MathRand() * MathRand() / MathPow(32767, 2)) * (Buffer[tr].Total - 2 * ValueBars));
      if(i < 0)
        {
         //--- trajectory shorter than 2*ValueBars: count the miss FIRST,
         //--- bail out after total_tr consecutive misses, then resample.
         //--- BUG FIX: the original placed check++ and the break after
         //--- 'continue', making them unreachable — a buffer holding only
         //--- short trajectories would spin forever.
         check++;
         if(check >= total_tr)
            break;
         iter--;
         continue;
        }
      check = 0;
      //--- History data: concatenate ValueBars consecutive states
      State.AssignArray(Buffer[tr].States[i].state);
      for(int state = 1; state < ValueBars; state++)
         State.AddArray(Buffer[tr].States[i + state].state);
      //--- Study: forward pass through the value model
      if(!Value.feedForward(GetPointer(State), 1, true, (CBufferFloat*)NULL))
        {
         PrintFormat("%s -> %d", __FUNCTION__, __LINE__);
         StopFlag = true;
         break;
        }
      //--- target = reward-to-go over the next ValueBars window:
      //--- recorded cumulative reward at the window end, minus the
      //--- discounted remainder recorded ValueBars later
      vector<float> target, result;
      target.Assign(Buffer[tr].States[i + ValueBars - 1].rewards);
      result.Assign(Buffer[tr].States[i + 2 * ValueBars - 1].rewards);
      target = target - result * MathPow(DiscFactor, ValueBars);
      Value.getResults(result);
      //--- conflict-averse step: move the prediction toward the target
      Result.AssignArray(CAGrad(target - result) + result);
      if(!Value.backProp(Result, (CBufferFloat *)NULL, (CBufferFloat *)NULL))
        {
         PrintFormat("%s -> %d", __FUNCTION__, __LINE__);
         StopFlag = true;
         break;
        }
      //--- progress report at most every 500 ms
      if(GetTickCount() - ticks > 500)
        {
         string str = StringFormat("%-15s %5.2f%% -> Error %15.8f\n", "Value", iter * 100.0 / (double)(Iterations), Value.getRecentAverageError());
         Comment(str);
         ticks = GetTickCount();
        }
     }
   Comment("");
//---
   PrintFormat("%s -> %d -> %-15s %10.7f", __FUNCTION__, __LINE__, "Value", Value.getRecentAverageError());
   ExpertRemove();
//---
  }
//+------------------------------------------------------------------+
//| Conflict-Averse Gradient (CAGrad)                                |
//| Rebalances the multi-component gradient 'grad' so that no single |
//| reward component dominates the update direction. Mixing weights  |
//| are found by a small momentum gradient descent over a softmax    |
//| parameterization. Based on Liu et al., "Conflict-Averse Gradient |
//| Descent for Multi-task Learning" (NeurIPS 2021) — TODO confirm   |
//| against the project's reference article.                         |
//+------------------------------------------------------------------+
vector<float> CAGrad(vector<float> &grad)
  {
   // Gram matrix of the gradient components with each other
   matrix<float> GG = grad.Outer(grad);
   GG.ReplaceNan(0);
   // all-zero gradient: nothing to rebalance
   if(MathAbs(GG).Sum() == 0)
      return grad;
   // normalize by the mean component magnitude for numeric stability
   float scale = MathSqrt(GG.Diag() + 1.0e-4f).Mean();
   GG = GG / MathPow(scale, 2);
   vector<float> Gg = GG.Mean(1);   // mean inner product per component
   float gg = Gg.Mean();            // overall mean inner product
   // w holds the logits of the mixing weights (softmax applied below)
   vector<float> w = vector<float>::Zeros(grad.Size());
   // conflict radius: how far the combined direction may deviate
   float c = MathSqrt(gg + 1.0e-4f) * fCAGrad_C;
   vector<float> w_best = w;
   float obj_best = FLT_MAX;
   vector<float> moment = vector<float>::Zeros(w.Size());
   // inner optimization of the mixing weights (momentum gradient descent)
   for(int i = 0; i < iCAGrad_Iters; i++)
     {
      vector<float> ww;
      w.Activation(ww, AF_SOFTMAX);
      // objective: linear term plus bounded quadratic-form penalty
      float obj = ww.Dot(Gg) + c * MathSqrt(ww.MatMul(GG).Dot(ww) + 1.0e-4f);
      if(MathAbs(obj) < obj_best)
        {
         obj_best = MathAbs(obj);
         w_best = w;
        }
      if(i < (iCAGrad_Iters - 1))
        {
         float loss = -obj;
         // d(obj)/d(ww): linear term plus both halves of the symmetric
         // derivative of the quadratic form
         vector<float> derev = Gg + GG.MatMul(ww) * c / (MathSqrt(ww.MatMul(GG).Dot(ww) + 1.0e-4f) * 2) + ww.MatMul(GG) * c / (MathSqrt(ww.MatMul(GG).Dot(ww) + 1.0e-4f) * 2);
         vector<float> delta = derev * loss;
         // propagate through the softmax Jacobian to get d/dw
         ulong size = delta.Size();
         matrix<float> ident = matrix<float>::Identity(size, size);
         vector<float> ones = vector<float>::Ones(size);
         matrix<float> sm_der = ones.Outer(ww);
         sm_der = sm_der.Transpose() * (ident - sm_der);
         delta = sm_der.MatMul(delta);
         // scale the step by its peak-to-peak range (guard against 0)
         if(delta.Ptp() != 0)
            delta = delta / delta.Ptp();
         moment = delta * 0.8f + moment * 0.5f;
         w += moment;
         if(w.Ptp() != 0)
            w = w / w.Ptp();
        }
     }
   // recover the best mixing weights and assemble the combined gradient
   w_best.Activation(w, AF_SOFTMAX);
   float gw_norm = MathSqrt(w.MatMul(GG).Dot(w) + 1.0e-4f);
   float lmbda = c / (gw_norm + 1.0e-4f);
   vector<float> result = ((w * lmbda + 1.0f / (float)grad.Size()) * grad) / (1 + MathPow(fCAGrad_C, 2));
//---
   return result;
  }
//+------------------------------------------------------------------+