2026-01-25 16:49:31 +00:00
|
|
|
//+------------------------------------------------------------------+
|
|
|
|
|
//| TrainAdvancedPriceActionNN.mq5 |
|
|
|
|
|
//| Copyright 2025, Google Gemini |
|
|
|
|
|
//| https://www.mql5.com |
|
|
|
|
|
//+------------------------------------------------------------------+
|
|
|
|
|
#property copyright "Copyright 2025, Google Gemini"
|
|
|
|
|
#property link "https://www.google.com"
|
|
|
|
|
#property version "1.00"
|
|
|
|
|
#property script_show_inputs
|
|
|
|
|
|
|
|
|
|
#include <NeuralNet.mqh>
|
|
|
|
|
|
|
|
|
|
input int InpEpochs = 500;
|
|
|
|
|
input int InpLookback = 144;
|
|
|
|
|
input string InpHiddenLayerConfig = "256,128,64";
|
|
|
|
|
input double InpTrainingRate = 0.001;
|
2026-01-26 17:18:11 +03:00
|
|
|
input double InpTrainingDecay = 0.98;
|
2026-01-25 16:49:31 +00:00
|
|
|
input double InpLambda = 0.0001; // L2 Regularization
|
|
|
|
|
input double InpJitter = 0.05; // Nudge magnitude when stuck
|
2026-01-26 17:18:11 +03:00
|
|
|
input int InpMaxBars = 100000; // Reduced default for memory safety with new features
|
|
|
|
|
input int InpPredictionHorizon = 72;
|
2026-01-25 16:49:31 +00:00
|
|
|
// Session Times (Broker Server Hour)
|
|
|
|
|
input int InpAsianStart = 0;
|
|
|
|
|
input int InpAsianEnd = 8;
|
|
|
|
|
input int InpLondonStart = 8;
|
|
|
|
|
input int InpLondonEnd = 16;
|
|
|
|
|
input int InpNYStart = 13;
|
|
|
|
|
input int InpNYEnd = 22;
|
|
|
|
|
|
|
|
|
|
CNeuralNet *Net;   // network under training; created and deleted in OnStart

int GlobalTopology[];   // layer sizes: [input, hidden..., output]

int handleATR_D1, handleEMA50, handleEMA200;   // indicator handles, created in OnStart
|
|
|
|
|
|
|
|
|
|
// --- Structures ---
|
|
|
|
|
|
|
|
|
|
// One supervised training example.
struct Sample {
   double inputs[];   // flattened feature vector (InpLookback * featuresPerBar)
   double target;     // label: +1 buy-win, -1 sell-win, 0 neither
};
|
|
|
|
|
|
|
|
|
|
// Aggregate stats for one trading session (Asian/London/NY) of a single day.
struct SessionStats {
   double H, L;             // session high / low (H starts at -1, L at 999999)
   double Volume;           // total tick volume of the session
   double OR30_H, OR30_L;   // opening-range (first 30 min) high / low; 0 means unset
   double OR30_Vol;         // opening-range tick volume
   bool valid;              // true once at least one bar fell inside the session
};
|
|
|
|
|
|
|
|
|
|
// Market Profile summary of one completed day (built by CalcProfile).
struct DailyProfile {
   double POC;        // Point of Control: price bin with maximum volume
   double VAH, VAL;   // Value Area high / low (bins covering ~70% of volume around POC)
   double Crests[];   // prices of high-volume nodes (local histogram peaks)
   double Troughs[];  // prices of low-volume nodes (local histogram valleys)
   bool valid;        // false if the day had no bars or no price range
};
|
|
|
|
|
|
|
|
|
|
// All per-day derived data; one entry per calendar day, indexed via DayMap[].
struct SessionData {
   SessionStats Asian;
   SessionStats London;
   SessionStats NY;
   DailyProfile Profile;   // this day's own profile; feature code reads the
                           // PREVIOUS day's entry for reference levels
};
|
|
|
|
|
|
|
|
|
|
// Maps a day index (days since baseTime) to that day's session/profile data.
SessionData DayMap[];

datetime baseTime;   // 00:00 of the oldest loaded bar's day; anchor for GetDayIndex()
|
|
|
|
|
|
|
|
|
|
// Per-bar VWAP snapshot, parallel to the rates[] array.
// Three anchors are tracked — Daily, Weekly, Monthly — each storing the
// running VWAP and its volume-weighted standard deviation.
struct VWAPPoint {
   double vwap_d, std_d;   // daily-anchored VWAP and stdev
   double vwap_w, std_w;   // weekly-anchored
   double vwap_m, std_m;   // monthly-anchored
};
|
|
|
|
|
VWAPPoint VWAPBuffer[];   // one entry per bar, same (series) indexing as rates[]

// Whole-dataset average session volumes used to normalize volume features.
// Index 0: opening-range (first 30 min) volume, index 1: full-session volume.
double AvgVol_Asian[2], AvgVol_London[2], AvgVol_NY[2];
|
|
|
|
|
|
|
|
|
|
// --- Helper Functions ---
|
|
|
|
|
|
|
|
|
|
//+------------------------------------------------------------------+
//| Calendar-day index of time t, counted from baseTime (day 0).     |
//+------------------------------------------------------------------+
int GetDayIndex(datetime t) {
   const long SECONDS_PER_DAY = 86400;
   long elapsed = (long)(t - baseTime);
   return (int)(elapsed / SECONDS_PER_DAY);
}
|
|
|
|
|
|
|
|
|
|
// Market Profile Calculation
|
|
|
|
|
//+------------------------------------------------------------------+
//| Build a Market Profile (volume-at-price histogram) over          |
//| rates[start..end-1] and fill 'out' with the POC, the 70% Value   |
//| Area (VAH/VAL), and the prices of local volume peaks (Crests)    |
//| and valleys (Troughs).                                           |
//| Sets out.valid=false when the range is empty or has no price     |
//| extent; otherwise out.valid=true.                                |
//+------------------------------------------------------------------+
void CalcProfile(const MqlRates &rates[], int start, int end, DailyProfile &out) {
   if(start >= end) { out.valid=false; return; }

   // 1. Price extent and total volume of the window.
   double minP = 999999, maxP = -1;
   double totalVol = 0;

   for(int i=start; i<end; i++) {
      if(rates[i].low < minP) minP = rates[i].low;
      if(rates[i].high > maxP) maxP = rates[i].high;
      totalVol += (double)rates[i].tick_volume;
   }

   if(maxP <= minP) { out.valid=false; return; }

   // 2. Histogram bins. Bin width starts at one point; the bin count is
   // capped at 10000 for performance, in which case stepSize grows
   // accordingly below.
   double point = _Point;
   if(point == 0) point = 0.0001;   // fallback if symbol point is unavailable

   int steps = (int)((maxP - minP) / point) + 1;
   if(steps > 10000) steps = 10000; // cap resolution for performance

   double stepSize = (maxP - minP) / steps;
   if(stepSize == 0) stepSize = point;

   double hist[]; ArrayResize(hist, steps); ArrayInitialize(hist, 0);

   // Each bar's volume is dropped into the single bin of its OHLC average
   // price (coarse but cheap compared to spreading over the bar's range).
   for(int i=start; i<end; i++) {
      double avgP = (rates[i].open + rates[i].close + rates[i].high + rates[i].low)/4.0;
      int idx = (int)((avgP - minP) / stepSize);
      if(idx >= 0 && idx < steps) hist[idx] += (double)rates[i].tick_volume;
   }

   // 3. POC = bin holding the maximum volume.
   int pocIdx = 0;
   double maxVal = -1;
   for(int i=0; i<steps; i++) {
      if(hist[i] > maxVal) { maxVal = hist[i]; pocIdx = i; }
   }
   out.POC = minP + pocIdx * stepSize;

   // 4. Value Area: expand outward from the POC, always absorbing the
   // heavier neighbouring bin first, until 70% of total volume is covered.
   double vaVol = totalVol * 0.70;
   double curVol = maxVal;
   int up = pocIdx, down = pocIdx;

   while(curVol < vaVol) {
      double vUp = (up < steps-1) ? hist[up+1] : 0;
      double vDown = (down > 0) ? hist[down-1] : 0;

      if(vUp > vDown) {
         if(up < steps-1) { up++; curVol += hist[up]; }
         else if(down > 0) { down--; curVol += hist[down]; }
         else break;   // both histogram edges reached
      } else {
         if(down > 0) { down--; curVol += hist[down]; }
         else if(up < steps-1) { up++; curVol += hist[up]; }
         else break;
      }
   }
   out.VAH = minP + up * stepSize;
   out.VAL = minP + down * stepSize;

   // 5. Crests / Troughs: simple local-extremum scan over interior bins.
   ArrayResize(out.Crests, 0);
   ArrayResize(out.Troughs, 0);

   for(int i=2; i<steps-2; i++) {
      double v = hist[i];
      double prev = hist[i-1];
      double next = hist[i+1];

      // Crest: local maximum carrying at least 10% of the POC volume
      // (filters out noise bins).
      if(v > prev && v > next) {
         if(v > maxVal * 0.1) {
            int s = ArraySize(out.Crests); ArrayResize(out.Crests, s+1);
            out.Crests[s] = minP + i * stepSize;
         }
      }

      // Trough: any local minimum (no significance filter).
      if(v < prev && v < next) {
         int s = ArraySize(out.Troughs); ArrayResize(out.Troughs, s+1);
         out.Troughs[s] = minP + i * stepSize;
      }
   }
   out.valid = true;
}
|
|
|
|
|
|
|
|
|
|
// Pre-Calculation
|
|
|
|
|
//+------------------------------------------------------------------+
//| Accumulate one bar into a session's running stats.               |
//| sessionMin: minutes elapsed since the session open (drives the   |
//| 30-minute Opening Range accumulation).                           |
//+------------------------------------------------------------------+
void UpdateSessionStats(SessionStats &s, const MqlRates &bar, int sessionMin, double vol) {
   if(s.H == -1 || bar.high > s.H) s.H = bar.high;
   if(bar.low < s.L) s.L = bar.low;
   s.Volume += vol;
   s.valid = true;

   // Opening Range: first 30 minutes of the session. 0 means "unset".
   if(sessionMin < 30) {
      if(s.OR30_H == 0 || bar.high > s.OR30_H) s.OR30_H = bar.high;
      if(s.OR30_L == 0 || bar.low < s.OR30_L) s.OR30_L = bar.low;
      s.OR30_Vol += vol;
   }
}

//+------------------------------------------------------------------+
//| Append one value to a dynamic array.                             |
//+------------------------------------------------------------------+
void AppendValue(double &arr[], double v) {
   int n = ArraySize(arr);
   ArrayResize(arr, n+1);
   arr[n] = v;
}

//+------------------------------------------------------------------+
//| Pre-compute per-day session stats, rolling VWAP bands and the    |
//| daily Market Profile over the whole rates[] history.             |
//| rates[] must be AS_SERIES (index 0 = newest bar).                |
//| Fills the globals: baseTime, DayMap[], VWAPBuffer[], AvgVol_*.   |
//+------------------------------------------------------------------+
void PrecomputeData(const MqlRates &rates[], int total) {
   Print("Pre-calculating advanced features...");

   // --- 1. Time map: day index 0 is the calendar day of the oldest bar.
   int oldestIdx = total - 1;
   datetime oldestTime = rates[oldestIdx].time;
   baseTime = oldestTime - (oldestTime % 86400);   // floor to 00:00 of that day

   int numDays = (int)((rates[0].time - baseTime) / 86400) + 5;   // small safety margin
   ArrayResize(DayMap, numDays);

   // Initialize every day slot explicitly. ArrayResize does not guarantee
   // zeroed struct members, and the OR30 logic relies on 0 meaning "unset",
   // so the OR30_* fields must be cleared here too (fixes a latent
   // uninitialized-read bug in the original).
   for(int i=0; i<numDays; i++) {
      DayMap[i].Asian.valid = false;  DayMap[i].Asian.H = -1;  DayMap[i].Asian.L = 999999;
      DayMap[i].Asian.Volume = 0;     DayMap[i].Asian.OR30_H = 0;  DayMap[i].Asian.OR30_L = 0;  DayMap[i].Asian.OR30_Vol = 0;
      DayMap[i].London.valid = false; DayMap[i].London.H = -1; DayMap[i].London.L = 999999;
      DayMap[i].London.Volume = 0;    DayMap[i].London.OR30_H = 0; DayMap[i].London.OR30_L = 0; DayMap[i].London.OR30_Vol = 0;
      DayMap[i].NY.valid = false;     DayMap[i].NY.H = -1;     DayMap[i].NY.L = 999999;
      DayMap[i].NY.Volume = 0;        DayMap[i].NY.OR30_H = 0;     DayMap[i].NY.OR30_L = 0;     DayMap[i].NY.OR30_Vol = 0;
      DayMap[i].Profile.valid = false;
   }

   ArrayResize(VWAPBuffer, total);

   // --- 2. VWAP accumulators, reset on day/week/month rollover.
   double sumPV_d=0, sumV_d=0, sumSqPV_d=0;   // daily
   double sumPV_w=0, sumV_w=0, sumSqPV_w=0;   // weekly
   double sumPV_m=0, sumV_m=0, sumSqPV_m=0;   // monthly
   int currDay = -1, currWeek = -1, currMonth = -1;

   // Iterate oldest -> newest (series array: indices run downwards).
   for(int i=total-1; i>=0; i--) {
      MqlDateTime dt; TimeToStruct(rates[i].time, dt);
      int dayIdx = GetDayIndex(rates[i].time);

      // Daily VWAP reset on calendar-day change.
      if(dt.day_of_year != currDay) {
         sumPV_d=0; sumV_d=0; sumSqPV_d=0;
         currDay = dt.day_of_year;
      }

      // Weekly reset via crude epoch-week index (7*86400 seconds).
      int weekNum = (int)(rates[i].time / 604800);
      if(weekNum != currWeek) {
         sumPV_w=0; sumV_w=0; sumSqPV_w=0;
         currWeek = weekNum;
      }

      // Monthly reset on month change.
      if(dt.mon != currMonth) {
         sumPV_m=0; sumV_m=0; sumSqPV_m=0;
         currMonth = dt.mon;
      }

      double typPrice = (rates[i].high + rates[i].low + rates[i].close) / 3.0;
      double vol = (double)rates[i].tick_volume;

      // Volume-weighted mean and variance:
      //   StdDev = sqrt( Sum(V*P^2)/Sum(V) - VWAP^2 )
      sumPV_d += typPrice * vol; sumV_d += vol; sumSqPV_d += vol * typPrice * typPrice;
      double vwap_d = (sumV_d > 0) ? sumPV_d / sumV_d : typPrice;
      double var_d = (sumV_d > 0) ? (sumSqPV_d / sumV_d) - (vwap_d * vwap_d) : 0;

      sumPV_w += typPrice * vol; sumV_w += vol; sumSqPV_w += vol * typPrice * typPrice;
      double vwap_w = (sumV_w > 0) ? sumPV_w / sumV_w : typPrice;
      double var_w = (sumV_w > 0) ? (sumSqPV_w / sumV_w) - (vwap_w * vwap_w) : 0;

      sumPV_m += typPrice * vol; sumV_m += vol; sumSqPV_m += vol * typPrice * typPrice;
      double vwap_m = (sumV_m > 0) ? sumPV_m / sumV_m : typPrice;
      double var_m = (sumV_m > 0) ? (sumSqPV_m / sumV_m) - (vwap_m * vwap_m) : 0;

      VWAPBuffer[i].vwap_d = vwap_d; VWAPBuffer[i].std_d = MathSqrt(MathMax(0, var_d));
      VWAPBuffer[i].vwap_w = vwap_w; VWAPBuffer[i].std_w = MathSqrt(MathMax(0, var_w));
      VWAPBuffer[i].vwap_m = vwap_m; VWAPBuffer[i].std_m = MathSqrt(MathMax(0, var_m));

      // --- 3. Session stats for this bar's day. These become "final" once
      // the session ends; the feature code guards against reading a still
      // developing session (leakage) by checking the bar's hour.
      int h = dt.hour;
      int m = dt.min;

      if(h >= InpAsianStart && h < InpAsianEnd)
         UpdateSessionStats(DayMap[dayIdx].Asian, rates[i], (h - InpAsianStart)*60 + m, vol);
      if(h >= InpLondonStart && h < InpLondonEnd)
         UpdateSessionStats(DayMap[dayIdx].London, rates[i], (h - InpLondonStart)*60 + m, vol);
      if(h >= InpNYStart && h < InpNYEnd)
         UpdateSessionStats(DayMap[dayIdx].NY, rates[i], (h - InpNYStart)*60 + m, vol);
   }

   // --- 4. Post-pass: Market Profile per completed day, plus per-session
   // volume lists used for the normalization averages below.
   double list_AsianOR[], list_AsianTot[];
   double list_LondonOR[], list_LondonTot[];
   double list_NYOR[], list_NYTot[];

   int dayStartIdx = total-1;   // series index of the oldest bar of the day being scanned
   int currentDayIdx = GetDayIndex(rates[total-1].time);

   for(int i=total-1; i>=-1; i--) {   // i==-1 flushes the final day
      int d = (i>=0) ? GetDayIndex(rates[i].time) : -1;

      if(d != currentDayIdx) {
         if(currentDayIdx >= 0 && currentDayIdx < numDays) {
            // Day 'currentDayIdx' occupies series indices [i+1 .. dayStartIdx].
            int idxStart = i + 1;
            CalcProfile(rates, idxStart, dayStartIdx + 1, DayMap[currentDayIdx].Profile);

            if(DayMap[currentDayIdx].Asian.valid) {
               AppendValue(list_AsianOR,  DayMap[currentDayIdx].Asian.OR30_Vol);
               AppendValue(list_AsianTot, DayMap[currentDayIdx].Asian.Volume);
            }
            if(DayMap[currentDayIdx].London.valid) {
               AppendValue(list_LondonOR,  DayMap[currentDayIdx].London.OR30_Vol);
               AppendValue(list_LondonTot, DayMap[currentDayIdx].London.Volume);
            }
            if(DayMap[currentDayIdx].NY.valid) {
               AppendValue(list_NYOR,  DayMap[currentDayIdx].NY.OR30_Vol);
               AppendValue(list_NYTot, DayMap[currentDayIdx].NY.Volume);
            }
         }
         currentDayIdx = d;
         dayStartIdx = i;
      }
   }

   // --- 5. Global (whole-dataset) average session volumes used to
   // normalize the volume features.
   // NOTE(review): the original plan called for a 20-day rolling average
   // per session; this global mean is a deliberate simplification.
   AvgVol_Asian[0]  = MathMean(list_AsianOR);  AvgVol_Asian[1]  = MathMean(list_AsianTot);
   AvgVol_London[0] = MathMean(list_LondonOR); AvgVol_London[1] = MathMean(list_LondonTot);
   AvgVol_NY[0]     = MathMean(list_NYOR);     AvgVol_NY[1]     = MathMean(list_NYTot);

   Print("Averages Calculated. Asian Full: ", AvgVol_Asian[1], " London Full: ", AvgVol_London[1]);
}
|
|
|
|
|
|
|
|
|
|
//+------------------------------------------------------------------+
//| Arithmetic mean of a dynamic array.                              |
//| Returns 1 (not 0) for an empty array so callers may divide by    |
//| the result when using it as a normalization denominator.         |
//+------------------------------------------------------------------+
double MathMean(double &arr[]) {
   int n = ArraySize(arr);
   if(n == 0) return 1;
   double total = 0;
   for(int k=n-1; k>=0; k--) total += arr[k];
   return total / n;
}
|
|
|
|
|
|
|
|
|
|
// --- Main ---
|
|
|
|
|
|
|
|
|
|
void OnStart()
|
|
|
|
|
{
|
|
|
|
|
Print("--- Advanced Training: Sessions, VWAP, Market Profile ---");
|
|
|
|
|
|
|
|
|
|
handleATR_D1 = iATR(_Symbol, PERIOD_D1, 14);
|
2026-01-26 17:18:11 +03:00
|
|
|
handleEMA50 = iMA(_Symbol, PERIOD_H1, 50, 0, MODE_EMA, PRICE_CLOSE);
|
|
|
|
|
handleEMA200 = iMA(_Symbol, PERIOD_H1, 200, 0, MODE_EMA, PRICE_CLOSE);
|
2026-01-25 16:49:31 +00:00
|
|
|
|
|
|
|
|
MqlRates rates[];
|
|
|
|
|
ArraySetAsSeries(rates, true);
|
2026-01-26 17:18:11 +03:00
|
|
|
int copied = CopyRates(_Symbol, PERIOD_H1, 0, InpMaxBars + 5000, rates);
|
2026-01-25 16:49:31 +00:00
|
|
|
if(copied < 5000) return;
|
|
|
|
|
|
|
|
|
|
double ema50[], ema200[];
|
|
|
|
|
ArraySetAsSeries(ema50, true);
|
|
|
|
|
ArraySetAsSeries(ema200, true);
|
|
|
|
|
CopyBuffer(handleEMA50, 0,0,copied, ema50);
|
|
|
|
|
CopyBuffer(handleEMA200, 0,0,copied, ema200);
|
|
|
|
|
|
|
|
|
|
// --- Log Training Data Info ---
|
|
|
|
|
datetime startT = rates[copied-1].time;
|
|
|
|
|
datetime endT = rates[0].time;
|
|
|
|
|
double totalDays = (double)(endT - startT) / 86400.0;
|
|
|
|
|
|
|
|
|
|
Print("--------------------------------------------------");
|
|
|
|
|
PrintFormat("Training Data Loaded: %d Bars", copied);
|
|
|
|
|
Print("Date Range: " + TimeToString(startT) + " to " + TimeToString(endT));
|
|
|
|
|
PrintFormat("Total Duration: %.2f Days", totalDays);
|
|
|
|
|
Print("--------------------------------------------------");
|
|
|
|
|
Print("Inputs Reference:");
|
|
|
|
|
PrintFormat("MaxBars: %d", InpMaxBars);
|
|
|
|
|
PrintFormat("Prediction Horizon: %d bars", InpPredictionHorizon);
|
|
|
|
|
PrintFormat("Lookback: %d bars", InpLookback);
|
|
|
|
|
PrintFormat("Hidden Layers: %s", InpHiddenLayerConfig);
|
|
|
|
|
PrintFormat("Training Rate: %.4f (Decay: %.3f)", InpTrainingRate, InpTrainingDecay);
|
|
|
|
|
Print("Sessions (Times): Asian " + IntegerToString(InpAsianStart) + "-" + IntegerToString(InpAsianEnd) +
|
|
|
|
|
", London " + IntegerToString(InpLondonStart) + "-" + IntegerToString(InpLondonEnd) +
|
|
|
|
|
", NY " + IntegerToString(InpNYStart) + "-" + IntegerToString(InpNYEnd));
|
|
|
|
|
Print("--------------------------------------------------");
|
|
|
|
|
|
|
|
|
|
PrecomputeData(rates, copied);
|
|
|
|
|
|
|
|
|
|
// Input Vector Size
|
|
|
|
|
// Session OR30 (3 sessions * 3 inputs: H, L, Vol) = 9
|
|
|
|
|
// Session Full (3 sessions * 3 inputs: H, L, Vol) = 9
|
|
|
|
|
// MA Diffs (3)
|
|
|
|
|
// Time (2)
|
|
|
|
|
// MP (POC, VAH, VAL, NearCrest, NearTrough) = 5 (using Previous Day)
|
|
|
|
|
// VWAP (D, W, M) * (Close-VWAP, +/-1SD, +/-2SD, +/-3SD -> 7 inputs each) = 21
|
|
|
|
|
// Total: 9+9+3+2+5+21 = 49 inputs.
|
|
|
|
|
int inputSize = 49 * InpLookback; // Wait, features per bar?
|
|
|
|
|
// Plan implies we feed these features FOR EACH BAR in Lookback?
|
|
|
|
|
// That's 49 * 144 = 7056 inputs. Getting large.
|
|
|
|
|
// Or features of the "Check Moment"?
|
|
|
|
|
// Standard sequential model feeds features per timestep.
|
|
|
|
|
// Yes, features per bar.
|
|
|
|
|
int featuresPerBar = 49;
|
|
|
|
|
|
|
|
|
|
// Topology
|
|
|
|
|
string result[];
|
|
|
|
|
int hiddenLayerCount = StringSplit(InpHiddenLayerConfig, ',', result);
|
|
|
|
|
ArrayResize(GlobalTopology, 1 + hiddenLayerCount + 1);
|
|
|
|
|
GlobalTopology[0] = InpLookback * featuresPerBar;
|
|
|
|
|
for(int i=0; i<hiddenLayerCount; i++) GlobalTopology[i+1] = (int)StringToInteger(result[i]);
|
|
|
|
|
GlobalTopology[ArraySize(GlobalTopology)-1] = 1;
|
|
|
|
|
|
2026-01-26 17:18:11 +03:00
|
|
|
Net = new CNeuralNet(GlobalTopology, ACT_LEAKY_RELU);
|
2026-01-25 16:49:31 +00:00
|
|
|
Net.SetLearningRate(InpTrainingRate);
|
|
|
|
|
Net.SetLambda(InpLambda);
|
|
|
|
|
|
|
|
|
|
Sample samples[];
|
|
|
|
|
int pos=0, neg=0, flat=0;
|
|
|
|
|
|
|
|
|
|
int startIdx = InpPredictionHorizon;
|
|
|
|
|
int endIdx = copied - InpLookback - 1;
|
|
|
|
|
if(endIdx - startIdx > InpMaxBars) endIdx = startIdx + InpMaxBars;
|
|
|
|
|
|
|
|
|
|
Print("Starting Sample Collection...");
|
|
|
|
|
|
|
|
|
|
for(int i = startIdx; i <= endIdx; i++)
|
|
|
|
|
{
|
|
|
|
|
// Stride: 20 mins
|
|
|
|
|
MqlDateTime dt; TimeToStruct(rates[i].time, dt);
|
|
|
|
|
//if(dt.min % 20 != 0) continue;
|
|
|
|
|
|
|
|
|
|
// Labeling (Same as before)
|
|
|
|
|
double d1ATR_Arr[1];
|
|
|
|
|
if(CopyBuffer(handleATR_D1, 0, rates[i].time, 1, d1ATR_Arr)<=0) continue;
|
|
|
|
|
double dailyATR = d1ATR_Arr[0];
|
|
|
|
|
double targetTP = 1.0 * dailyATR;
|
|
|
|
|
double targetSL = 0.5 * dailyATR;
|
|
|
|
|
double entryPrice = rates[i].close;
|
|
|
|
|
double label = 0;
|
|
|
|
|
|
|
|
|
|
int firstBuyTP = 99999, firstBuySL = 99999;
|
|
|
|
|
int firstSellTP = 99999, firstSellSL = 99999;
|
|
|
|
|
|
|
|
|
|
for(int k=1; k<=InpPredictionHorizon; k++) {
|
|
|
|
|
int f = i-k; if(f<0) break;
|
|
|
|
|
double h = rates[f].high;
|
|
|
|
|
double l = rates[f].low;
|
|
|
|
|
|
|
|
|
|
if(firstBuyTP==99999 && h >= entryPrice + targetTP) firstBuyTP = k;
|
|
|
|
|
if(firstBuySL==99999 && l <= entryPrice - targetSL) firstBuySL = k;
|
|
|
|
|
|
|
|
|
|
if(firstSellTP==99999 && l <= entryPrice - targetTP) firstSellTP = k;
|
|
|
|
|
if(firstSellSL==99999 && h >= entryPrice + targetSL) firstSellSL = k;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if(firstBuyTP < firstBuySL) label = 1.0; // Buy Win
|
|
|
|
|
else if(firstSellTP < firstSellSL) label = -1.0; // Sell Win
|
|
|
|
|
else label = 0.0;
|
|
|
|
|
|
|
|
|
|
// Feature Construction
|
|
|
|
|
double inputs[];
|
|
|
|
|
ArrayResize(inputs, InpLookback * featuresPerBar);
|
|
|
|
|
|
|
|
|
|
for(int k=0; k<InpLookback; k++) {
|
|
|
|
|
int p = i + k; // p is index in rates (Series, so p is older)
|
|
|
|
|
if(p >= copied) break;
|
|
|
|
|
int off = k * featuresPerBar;
|
|
|
|
|
|
|
|
|
|
double c = rates[p].close;
|
|
|
|
|
|
|
|
|
|
// 1. Session Stats
|
|
|
|
|
int dayIdx = GetDayIndex(rates[p].time);
|
|
|
|
|
// For current day stats, we use the "developing" stats if p is today.
|
|
|
|
|
// But DayMap stores "final" stats for that day.
|
|
|
|
|
// Issue: using future knowledge (DayMap has full day stats).
|
|
|
|
|
// Fix: We must NOT use DayMap for "Current Day" stats if we are 'simulating' live.
|
|
|
|
|
// However, implementing full "developing session" logic is complex.
|
|
|
|
|
// Approximation: For Previous Days (Asian/London if we are in NY), it's fine.
|
|
|
|
|
// For Current Session: this is slight data leakage if we use DayMap[dayIdx].NY.H when we are at 14:00.
|
|
|
|
|
// Since this is "Advanced", user likely wants correctness.
|
|
|
|
|
// Correctness fix: Use DayMap[dayIndex-1] (Yesterday) for reference,
|
|
|
|
|
// and for Today, we should ideally compute on fly.
|
|
|
|
|
// BUT, for simplistic implementation of "Session Highs/Lows", we typically look at *Completed* sessions.
|
|
|
|
|
// If we are in NY, we look at Today's London & Asian (Completed/Developing) and Yesterday's NY.
|
|
|
|
|
// Let's implement that lookup.
|
|
|
|
|
|
|
|
|
|
// Asian (0-8)
|
|
|
|
|
double aH=c, aL=c, aVol=0, aORH=c, aORL=c, aORVol=0;
|
|
|
|
|
int aIdx = dayIdx;
|
|
|
|
|
// If we are before Asian end, we might want Yesterday's Asian? Or Developing?
|
|
|
|
|
// Let's use "Latest Completed or Current Developing".
|
|
|
|
|
// Simply use DayMap[dayIdx] (Leakage warning).
|
|
|
|
|
// To avoid leakage: Only use sessions that ended BEFORE rates[p].time.
|
|
|
|
|
// Check time.
|
|
|
|
|
MqlDateTime pDt; TimeToStruct(rates[p].time, pDt);
|
|
|
|
|
int pHour = pDt.hour;
|
|
|
|
|
|
|
|
|
|
// Determine valid session indices
|
|
|
|
|
int idxA = (pHour >= InpAsianEnd) ? dayIdx : dayIdx - 1;
|
|
|
|
|
int idxL = (pHour >= InpLondonEnd) ? dayIdx : dayIdx - 1;
|
|
|
|
|
int idxN = (pHour >= InpNYEnd) ? dayIdx : dayIdx - 1;
|
|
|
|
|
|
|
|
|
|
// Fetch Asian
|
|
|
|
|
if(idxA>=0 && DayMap[idxA].Asian.valid) {
|
|
|
|
|
aH = DayMap[idxA].Asian.H; aL = DayMap[idxA].Asian.L; aVol = DayMap[idxA].Asian.Volume;
|
|
|
|
|
aORH = DayMap[idxA].Asian.OR30_H; aORL = DayMap[idxA].Asian.OR30_L; aORVol = DayMap[idxA].Asian.OR30_Vol;
|
|
|
|
|
}
|
|
|
|
|
// Fetch London
|
|
|
|
|
double lH=c, lL=c, lVol=0, lORH=c, lORL=c, lORVol=0;
|
|
|
|
|
if(idxL>=0 && DayMap[idxL].London.valid) {
|
|
|
|
|
lH = DayMap[idxL].London.H; lL = DayMap[idxL].London.L; lVol = DayMap[idxL].London.Volume;
|
|
|
|
|
lORH = DayMap[idxL].London.OR30_H; lORL = DayMap[idxL].London.OR30_L; lORVol = DayMap[idxL].London.OR30_Vol;
|
|
|
|
|
}
|
|
|
|
|
// Fetch NY
|
|
|
|
|
double nH=c, nL=c, nVol=0, nORH=c, nORL=c, nORVol=0;
|
|
|
|
|
if(idxN>=0 && DayMap[idxN].NY.valid) {
|
|
|
|
|
nH = DayMap[idxN].NY.H; nL = DayMap[idxN].NY.L; nVol = DayMap[idxN].NY.Volume;
|
|
|
|
|
nORH = DayMap[idxN].NY.OR30_H; nORL = DayMap[idxN].NY.OR30_L; nORVol = DayMap[idxN].NY.OR30_Vol;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Normalize
|
|
|
|
|
// Price diffs relative to Close, scaled
|
|
|
|
|
inputs[off+0] = (c - aORH)/c * 1000; inputs[off+1] = (c - aORL)/c * 1000; inputs[off+2] = (AvgVol_Asian[0]>0)?(aORVol/AvgVol_Asian[0]):0;
|
|
|
|
|
inputs[off+3] = (c - aH)/c * 1000; inputs[off+4] = (c - aL)/c * 1000; inputs[off+5] = (AvgVol_Asian[1]>0)?(aVol/AvgVol_Asian[1]):0;
|
|
|
|
|
|
|
|
|
|
inputs[off+6] = (c - lORH)/c * 1000; inputs[off+7] = (c - lORL)/c * 1000; inputs[off+8] = (AvgVol_London[0]>0)?(lORVol/AvgVol_London[0]):0;
|
|
|
|
|
inputs[off+9] = (c - lH)/c * 1000; inputs[off+10] = (c - lL)/c * 1000; inputs[off+11] = (AvgVol_London[1]>0)?(lVol/AvgVol_London[1]):0;
|
|
|
|
|
|
|
|
|
|
inputs[off+12] = (c - nORH)/c * 1000; inputs[off+13] = (c - nORL)/c * 1000; inputs[off+14] = (AvgVol_NY[0]>0)?(nORVol/AvgVol_NY[0]):0;
|
|
|
|
|
inputs[off+15] = (c - nH)/c * 1000; inputs[off+16] = (c - nL)/c * 1000; inputs[off+17] = (AvgVol_NY[1]>0)?(nVol/AvgVol_NY[1]):0;
|
|
|
|
|
|
|
|
|
|
// 2. MA Diffs
|
|
|
|
|
inputs[off+18] = (c - ema50[p])/ema50[p] * 1000.0;
|
|
|
|
|
inputs[off+19] = (c - ema200[p])/ema200[p] * 1000.0;
|
|
|
|
|
inputs[off+20] = (ema50[p] - ema200[p])/ema200[p] * 1000.0;
|
|
|
|
|
|
|
|
|
|
// 3. Time
|
|
|
|
|
inputs[off+21] = (double)pDt.hour / 24.0;
|
|
|
|
|
inputs[off+22] = (double)((pDt.min/15)) / 4.0;
|
|
|
|
|
|
|
|
|
|
// 4. Market Profile (Previous Day)
|
|
|
|
|
int mpIdx = dayIdx - 1;
|
|
|
|
|
double poc=c, vah=c, val=c;
|
|
|
|
|
double dCrest=0, dTrough=0; // Distance to nearest
|
|
|
|
|
|
|
|
|
|
if(mpIdx>=0 && DayMap[mpIdx].Profile.valid) {
|
|
|
|
|
poc = DayMap[mpIdx].Profile.POC;
|
|
|
|
|
vah = DayMap[mpIdx].Profile.VAH;
|
|
|
|
|
val = DayMap[mpIdx].Profile.VAL;
|
|
|
|
|
|
|
|
|
|
// Find nearest crest/trough
|
|
|
|
|
double minD = 999999;
|
|
|
|
|
for(int cx=0; cx<ArraySize(DayMap[mpIdx].Profile.Crests); cx++) {
|
|
|
|
|
double d = MathAbs(c - DayMap[mpIdx].Profile.Crests[cx]);
|
|
|
|
|
if(d < minD) minD = d;
|
|
|
|
|
}
|
|
|
|
|
dCrest = (minD!=999999) ? minD : 0;
|
|
|
|
|
|
|
|
|
|
minD = 999999;
|
|
|
|
|
for(int tx=0; tx<ArraySize(DayMap[mpIdx].Profile.Troughs); tx++) {
|
|
|
|
|
double d = MathAbs(c - DayMap[mpIdx].Profile.Troughs[tx]);
|
|
|
|
|
if(d < minD) minD = d;
|
|
|
|
|
}
|
|
|
|
|
dTrough = (minD!=999999) ? minD : 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
inputs[off+23] = (c - poc)/c * 1000.0;
|
|
|
|
|
inputs[off+24] = (c - vah)/c * 1000.0;
|
|
|
|
|
inputs[off+25] = (c - val)/c * 1000.0;
|
|
|
|
|
inputs[off+26] = dCrest / c * 1000.0; // Dist is absolute, normalize by price
|
|
|
|
|
inputs[off+27] = dTrough / c * 1000.0;
|
|
|
|
|
|
|
|
|
|
// 5. VWAP
|
|
|
|
|
VWAPBuffer[p]; // Current Bar VWAP
|
|
|
|
|
|
|
|
|
|
double std1_d = VWAPBuffer[p].std_d; double std1_w = VWAPBuffer[p].std_w; double std1_m = VWAPBuffer[p].std_m;
|
|
|
|
|
// Daily (7 inputs)
|
|
|
|
|
inputs[off+28] = (c - VWAPBuffer[p].vwap_d)/c * 1000.0;
|
|
|
|
|
inputs[off+29] = (c - (VWAPBuffer[p].vwap_d + std1_d))/c * 1000.0;
|
|
|
|
|
inputs[off+30] = (c - (VWAPBuffer[p].vwap_d - std1_d))/c * 1000.0;
|
|
|
|
|
inputs[off+31] = (c - (VWAPBuffer[p].vwap_d + 2*std1_d))/c * 1000.0;
|
|
|
|
|
inputs[off+32] = (c - (VWAPBuffer[p].vwap_d - 2*std1_d))/c * 1000.0;
|
|
|
|
|
inputs[off+33] = (c - (VWAPBuffer[p].vwap_d + 3*std1_d))/c * 1000.0;
|
|
|
|
|
inputs[off+34] = (c - (VWAPBuffer[p].vwap_d - 3*std1_d))/c * 1000.0; // off+34 is correct (28+6)
|
|
|
|
|
|
|
|
|
|
// Weekly (7 inputs)
|
|
|
|
|
inputs[off+35] = (c - VWAPBuffer[p].vwap_w)/c * 1000.0;
|
|
|
|
|
inputs[off+36] = (c - (VWAPBuffer[p].vwap_w + std1_w))/c * 1000.0;
|
|
|
|
|
inputs[off+37] = (c - (VWAPBuffer[p].vwap_w - std1_w))/c * 1000.0;
|
|
|
|
|
inputs[off+38] = (c - (VWAPBuffer[p].vwap_w + 2*std1_w))/c * 1000.0;
|
|
|
|
|
inputs[off+39] = (c - (VWAPBuffer[p].vwap_w - 2*std1_w))/c * 1000.0;
|
|
|
|
|
inputs[off+40] = (c - (VWAPBuffer[p].vwap_w + 3*std1_w))/c * 1000.0;
|
|
|
|
|
inputs[off+41] = (c - (VWAPBuffer[p].vwap_w - 3*std1_w))/c * 1000.0;
|
|
|
|
|
|
|
|
|
|
// Monthly (7 inputs)
|
|
|
|
|
inputs[off+42] = (c - VWAPBuffer[p].vwap_m)/c * 1000.0;
|
|
|
|
|
inputs[off+43] = (c - (VWAPBuffer[p].vwap_m + std1_m))/c * 1000.0;
|
|
|
|
|
inputs[off+44] = (c - (VWAPBuffer[p].vwap_m - std1_m))/c * 1000.0;
|
|
|
|
|
inputs[off+45] = (c - (VWAPBuffer[p].vwap_m + 2*std1_m))/c * 1000.0;
|
|
|
|
|
inputs[off+46] = (c - (VWAPBuffer[p].vwap_m - 2*std1_m))/c * 1000.0;
|
|
|
|
|
inputs[off+47] = (c - (VWAPBuffer[p].vwap_m + 3*std1_m))/c * 1000.0;
|
|
|
|
|
inputs[off+48] = (c - (VWAPBuffer[p].vwap_m - 3*std1_m))/c * 1000.0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Store Sample
|
|
|
|
|
Sample s; s.target = label;
|
|
|
|
|
ArrayResize(s.inputs, ArraySize(inputs));
|
|
|
|
|
ArrayCopy(s.inputs, inputs);
|
|
|
|
|
|
|
|
|
|
int n=ArraySize(samples);
|
|
|
|
|
ArrayResize(samples, n+1);
|
|
|
|
|
samples[n] = s;
|
|
|
|
|
|
|
|
|
|
if(label > 0.1) pos++; else if(label < -0.1) neg++; else flat++;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
PrintFormat("Collected: Pos=%d Neg=%d Flat=%d", pos, neg, flat);
|
|
|
|
|
if(pos == 0 && neg == 0) {
|
|
|
|
|
Print("No valid samples!");
|
|
|
|
|
delete Net;
|
|
|
|
|
IndicatorRelease(handleATR_D1);
|
|
|
|
|
IndicatorRelease(handleEMA50);
|
|
|
|
|
IndicatorRelease(handleEMA200);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Balancing
|
|
|
|
|
int targetCount = (pos + neg) / 2;
|
|
|
|
|
Sample balanced[];
|
|
|
|
|
for(int k=0; k<ArraySize(samples); k++) {
|
|
|
|
|
if(samples[k].target == 0) {
|
|
|
|
|
if(MathRand()%100 < 100.0 * targetCount/flat) {
|
|
|
|
|
int b=ArraySize(balanced); ArrayResize(balanced, b+1);
|
|
|
|
|
balanced[b]=samples[k];
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
int b=ArraySize(balanced); ArrayResize(balanced, b+1);
|
|
|
|
|
balanced[b]=samples[k];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
PrintFormat("Balanced to %d", ArraySize(balanced));
|
|
|
|
|
|
|
|
|
|
// Train
|
|
|
|
|
int total = ArraySize(balanced);
|
|
|
|
|
int nTrain = (int)(total * 0.8);
|
|
|
|
|
int indices[]; ArrayResize(indices, nTrain);
|
|
|
|
|
for(int k=0; k<nTrain; k++) indices[k]=k;
|
|
|
|
|
|
|
|
|
|
double bestValAcc = 0;
|
2026-01-26 17:18:11 +03:00
|
|
|
int stuckCount = 0;
|
2026-01-25 16:49:31 +00:00
|
|
|
|
|
|
|
|
for(int e=0; e<InpEpochs; e++) {
|
2026-01-26 17:18:11 +03:00
|
|
|
// Shuffle training indices
|
2026-01-25 16:49:31 +00:00
|
|
|
for(int k=nTrain-1; k>0; k--) { int r=MathRand()%(k+1); int t=indices[k]; indices[k]=indices[r]; indices[r]=t; }
|
|
|
|
|
|
|
|
|
|
for(int k=0; k<nTrain; k++) {
|
2026-01-26 17:18:11 +03:00
|
|
|
double target[1]; target[0]=balanced[indices[k]].target;
|
2026-01-25 16:49:31 +00:00
|
|
|
Net.FeedForward(balanced[indices[k]].inputs);
|
2026-01-26 17:18:11 +03:00
|
|
|
Net.BackProp(target);
|
2026-01-25 16:49:31 +00:00
|
|
|
}
|
|
|
|
|
|
2026-01-26 17:18:11 +03:00
|
|
|
// Calculate Validation Accuracy
|
2026-01-25 16:49:31 +00:00
|
|
|
int corr=0, tot=0;
|
|
|
|
|
for(int v=nTrain; v<total; v++) {
|
|
|
|
|
Net.FeedForward(balanced[v].inputs);
|
|
|
|
|
double res[]; Net.GetResults(res);
|
|
|
|
|
if(MathAbs(balanced[v].target)>0.1) {
|
|
|
|
|
tot++;
|
|
|
|
|
if(balanced[v].target * res[0] > 0) corr++;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
double valAcc = (tot>0)?(double)corr/tot*100:0;
|
|
|
|
|
|
2026-01-26 17:18:11 +03:00
|
|
|
if(e%10==0 || e==InpEpochs-1) {
|
|
|
|
|
PrintFormat("Epoch %d Val: %.1f%% (Best: %.1f%%) - LR: %.6f", e, valAcc, bestValAcc, Net.eta);
|
|
|
|
|
}
|
2026-01-25 16:49:31 +00:00
|
|
|
|
2026-01-26 17:18:11 +03:00
|
|
|
// Handle "Best Model" Persistence
|
2026-01-25 16:49:31 +00:00
|
|
|
if(valAcc > bestValAcc + 0.05) {
|
|
|
|
|
bestValAcc = valAcc;
|
|
|
|
|
SaveModelWithHeader("AdvancedPriceActionNN.bin");
|
|
|
|
|
PrintFormat("--- NEW BEST MODEL SAVED! Accuracy: %.2f%%", bestValAcc);
|
2026-01-26 17:18:11 +03:00
|
|
|
stuckCount = 0;
|
|
|
|
|
} else {
|
|
|
|
|
stuckCount++;
|
2026-01-25 16:49:31 +00:00
|
|
|
}
|
2026-01-26 17:18:11 +03:00
|
|
|
|
|
|
|
|
// If no improvement for 30 epochs, NUDGE
|
|
|
|
|
if(stuckCount >= 30 && InpJitter > 0) {
|
|
|
|
|
PrintFormat("STUCK DETECTED! Nudging weights with magnitude %.3f...", InpJitter);
|
|
|
|
|
Net.Jitter(InpJitter);
|
|
|
|
|
stuckCount = 0;
|
|
|
|
|
Net.SetLearningRate(Net.eta * 1.5); // Temporary boost to help escape
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-25 16:49:31 +00:00
|
|
|
Net.SetLearningRate(Net.eta * InpTrainingDecay);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Print("TRAINING FINISHED.");
|
|
|
|
|
SaveModelWithHeader("AdvancedPriceActionNN.bin");
|
|
|
|
|
|
|
|
|
|
delete Net; Net = NULL;
|
|
|
|
|
IndicatorRelease(handleATR_D1);
|
|
|
|
|
IndicatorRelease(handleEMA50);
|
|
|
|
|
IndicatorRelease(handleEMA200);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
//+------------------------------------------------------------------+
//| Cleanup handler: flush partially-trained weights to disk when    |
//| the script is interrupted, then free the network object and all  |
//| indicator handles.                                               |
//+------------------------------------------------------------------+
void OnDeinit(const int reason)
{
   // A live dynamic pointer here means OnStart did not finish its own
   // save/delete sequence — persist whatever progress we have.
   const bool netAlive = (CheckPointer(Net) == POINTER_DYNAMIC);
   if(netAlive)
   {
      Print("Script stopped/interrupted. Saving current weights...");
      SaveModelWithHeader("AdvancedPriceActionNN.bin");
      delete Net;
      Net = NULL;
   }

   // Release indicator handles unconditionally; safe even if OnStart
   // already released them (invalid handles are simply rejected).
   IndicatorRelease(handleATR_D1);
   IndicatorRelease(handleEMA50);
   IndicatorRelease(handleEMA200);
}
|
|
|
|
|
|
|
|
|
|
//+------------------------------------------------------------------+
//| Serialize the trained network plus the metadata a consumer EA    |
//| needs to rebuild the feature pipeline.                           |
//|                                                                  |
//| Binary layout (version 2), written to the COMMON files folder:   |
//|   int    version (=2)                                            |
//|   int    InpLookback                                             |
//|   int    topology length, then one int per layer size            |
//|   double target multiplier (1.0 ATR)                             |
//|   double stop multiplier (0.5 ATR)                               |
//|   6x int session hours (Asian/London/NY start+end)               |
//|   ...    raw weights via CNeuralNet::Save                        |
//|                                                                  |
//| param filename  File name inside the common files directory.     |
//+------------------------------------------------------------------+
void SaveModelWithHeader(string filename)
{
   // Guard against a NULL/deleted network: previously Net.Save() would be
   // called unconditionally, crashing and leaving a truncated header-only
   // file if this ever ran after `delete Net`.
   if(CheckPointer(Net) != POINTER_DYNAMIC)
   {
      Print("SaveModelWithHeader: network pointer invalid, nothing saved.");
      return;
   }

   int handle = FileOpen(filename, FILE_WRITE|FILE_BIN|FILE_COMMON);
   if(handle == INVALID_HANDLE) { Print("Error opening file for write: ", filename); return; }

   // --- Header data ---
   int version = 2; // Version 2 supports Metadata
   FileWriteInteger(handle, version);
   FileWriteInteger(handle, InpLookback);
   FileWriteInteger(handle, ArraySize(GlobalTopology));
   for(int i=0; i<ArraySize(GlobalTopology); i++) FileWriteInteger(handle, GlobalTopology[i]);

   // --- Risk settings (fixed multipliers consumed by the trading EA) ---
   FileWriteDouble(handle, 1.0); // Target Multiplier (1.0 ATR)
   FileWriteDouble(handle, 0.5); // Stop Multiplier (0.5 ATR)

   // --- Session times (broker server hours, must match training inputs) ---
   FileWriteInteger(handle, InpAsianStart);
   FileWriteInteger(handle, InpAsianEnd);
   FileWriteInteger(handle, InpLondonStart);
   FileWriteInteger(handle, InpLondonEnd);
   FileWriteInteger(handle, InpNYStart);
   FileWriteInteger(handle, InpNYEnd);

   // --- Network weights ---
   if(Net.Save(handle)) Print("Saved Model + Metadata to ", filename);
   else Print("Error saving weights!");

   FileClose(handle);
}
|