//+------------------------------------------------------------------+
//| TrainAdvancedPriceActionNN.mq5                                   |
//| Copyright 2025, Google Gemini                                    |
//| https://www.mql5.com                                             |
//+------------------------------------------------------------------+
// NOTE(review): this file was recovered from a markup-stripped copy in which
// every span between a '<' and a '>' had been deleted (loop conditions,
// comparisons, the #include target, and several whole statement runs).
// Reconstructed spans are marked "[reconstructed]" and must be verified
// against the original source.
#property copyright "Copyright 2025, Google Gemini"
#property link      "https://www.google.com"
#property version   "1.00"
#property script_show_inputs

// TODO(review): original include target lost to corruption. The script uses a
// CNeuralNet class with eta / SetLearningRate / Jitter members — restore the
// correct header name (e.g. <NeuralNet.mqh>).
#include <NeuralNet.mqh>

input int    InpEpochs            = 500;
input int    InpLookback          = 144;
input string InpHiddenLayerConfig = "256,128,64";
input double InpTrainingRate      = 0.001;
input double InpTrainingDecay     = 0.98;
input double InpLambda            = 0.0001;   // L2 Regularization
input double InpJitter            = 0.05;     // Nudge magnitude when stuck
input int    InpMaxBars           = 100000;   // Reduced default for memory safety with new features
input int    InpPredictionHorizon = 72;

// Session Times (Broker Server Hour)
input int InpAsianStart  = 0;
input int InpAsianEnd    = 8;
input int InpLondonStart = 8;
input int InpLondonEnd   = 16;
input int InpNYStart     = 13;
input int InpNYEnd       = 22;

CNeuralNet *Net;
int GlobalTopology[];
int handleATR_D1, handleEMA50, handleEMA200;

// --- Structures ---

// One training example: flattened feature vector plus scalar label in {-1,0,1}.
struct Sample
{
   double inputs[];
   double target;
};

// Per-session aggregates for a single calendar day.
struct SessionStats
{
   double H, L;            // session high / low
   double Volume;          // total tick volume over the session
   double OR30_H, OR30_L;  // opening-range (first 30 min) high / low
   double OR30_Vol;        // opening-range tick volume
   bool   valid;           // true once at least one bar fell into the session
};

// Market-profile summary for one day.
struct DailyProfile
{
   double POC;        // point of control (highest-volume price bin)
   double VAH, VAL;   // value area high / low (70% volume)
   double Crests[];   // prices of high-volume nodes
   double Troughs[];  // prices of low-volume nodes
   bool   valid;
};

// Everything we precompute for one day. The Profile is the day's own profile;
// feature construction reads DayMap[dayIdx-1].Profile (previous day) to avoid
// lookahead.
struct SessionData
{
   SessionStats Asian;
   SessionStats London;
   SessionStats NY;
   DailyProfile Profile;
};

// Maps DayIndex -> Data
SessionData DayMap[];
datetime    baseTime;

// VWAP values per bar (parallel to the rates array).
// d = daily-anchored, w = weekly-anchored, m = monthly-anchored.
struct VWAPPoint
{
   double vwap_d, std_d;
   double vwap_w, std_w;
   double vwap_m, std_m;
};
VWAPPoint VWAPBuffer[];

// Rolling averages for volume normalization. Index 0: OR30, 1: Full session.
double AvgVol_Asian[2], AvgVol_London[2], AvgVol_NY[2];

// --- Helper Functions ---

// Days elapsed since baseTime (midnight of the oldest bar's day).
int GetDayIndex(datetime t)
{
   return (int)((t - baseTime) / 86400);
}

// [reconstructed] Append one value to a dynamic double array.
void AppendD(double &arr[], double v)
{
   int s = ArraySize(arr);
   ArrayResize(arr, s + 1);
   arr[s] = v;
}

//+------------------------------------------------------------------+
//| Market Profile Calculation                                       |
//| Builds a fixed-resolution volume histogram over rates[start,end) |
//| and derives POC, 70% value area, and volume crests/troughs.      |
//+------------------------------------------------------------------+
void CalcProfile(const MqlRates &rates[], int start, int end, DailyProfile &out)
{
   if(start >= end) { out.valid = false; return; }

   // 1. Find Min/Max price to size the histogram.
   double minP = 999999, maxP = -1;
   double totalVol = 0;
   // [reconstructed] loop condition and low/high updates lost in corruption
   for(int i = start; i < end; i++)
   {
      if(rates[i].low  < minP) minP = rates[i].low;
      if(rates[i].high > maxP) maxP = rates[i].high;
      totalVol += (double)rates[i].tick_volume;
   }
   if(maxP <= minP) { out.valid = false; return; }

   // 2. Build Histogram (tick precision, integer steps of _Point,
   //    capped at 10000 bins for performance).
   double point = _Point;
   if(point == 0) point = 0.0001;
   int steps = (int)((maxP - minP) / point) + 1;
   if(steps > 10000) steps = 10000;
   double stepSize = (maxP - minP) / steps;
   if(stepSize == 0) stepSize = point;

   double hist[];
   ArrayResize(hist, steps);
   ArrayInitialize(hist, 0);
   // [reconstructed] the binned price was lost — typical price assumed; verify.
   for(int i = start; i < end; i++)
   {
      double typ = (rates[i].high + rates[i].low + rates[i].close) / 3.0;
      int idx = (int)((typ - minP) / stepSize);
      if(idx >= 0 && idx < steps) hist[idx] += (double)rates[i].tick_volume;
   }

   // 3. Find POC (bin with the largest volume).
   int pocIdx = 0;
   double maxVal = -1;
   for(int i = 0; i < steps; i++)
   {
      if(hist[i] > maxVal) { maxVal = hist[i]; pocIdx = i; }
   }
   out.POC = minP + pocIdx * stepSize;

   // 4. Value Area (70%): expand from POC toward the heavier neighbor.
   double vaVol  = totalVol * 0.70;
   double curVol = maxVal;
   int up = pocIdx, down = pocIdx;
   while(curVol < vaVol)
   {
      double vUp   = (up < steps - 1) ? hist[up + 1]   : 0;
      double vDown = (down > 0)       ? hist[down - 1] : 0;
      if(vUp > vDown)
      {
         if(up < steps - 1)   { up++;   curVol += hist[up];   }
         else if(down > 0)    { down--; curVol += hist[down]; }
         else break;
      }
      else
      {
         if(down > 0)         { down--; curVol += hist[down]; }
         else if(up < steps-1){ up++;   curVol += hist[up];   }
         else break;
      }
   }
   out.VAH = minP + up   * stepSize;
   out.VAL = minP + down * stepSize;

   // 5. Crests and Troughs (simple 3-bin peak detection).
   ArrayResize(out.Crests, 0);
   ArrayResize(out.Troughs, 0);
   for(int i = 2; i < steps - 2; i++)   // [reconstructed] bound lost; symmetric margin assumed
   {
      double prev = hist[i - 1], v = hist[i], next = hist[i + 1];
      // Crest (Low-High-Low), kept only if "significant" (>10% of POC volume).
      if(v > prev && v > next)
      {
         if(v > maxVal * 0.1)
         {
            int s = ArraySize(out.Crests);
            ArrayResize(out.Crests, s + 1);
            out.Crests[s] = minP + i * stepSize;
         }
      }
      // Trough (High-Low-High).
      if(v < prev && v < next)
      {
         int s = ArraySize(out.Troughs);
         ArrayResize(out.Troughs, s + 1);
         out.Troughs[s] = minP + i * stepSize;
      }
   }
   out.valid = true;
}

//+------------------------------------------------------------------+
//| Pre-Calculation                                                  |
//| Single backward pass (oldest -> newest, rates is AS_SERIES) that |
//| fills VWAPBuffer and the per-day session stats, then a second    |
//| pass that computes each day's market profile and global volume   |
//| averages.                                                        |
//+------------------------------------------------------------------+
void PrecomputeData(const MqlRates &rates[], int total)
{
   Print("Pre-calculating advanced features...");

   // 1. Setup time map. rates is AS_SERIES: index 0 is newest.
   int oldestIdx = total - 1;
   datetime oldestTime = rates[oldestIdx].time;
   baseTime = oldestTime - (oldestTime % 86400);   // floor to day start
   int numDays = (int)((rates[0].time - baseTime) / 86400) + 5;
   ArrayResize(DayMap, numDays);

   // [reconstructed] Initialize days. H sentinel is -1 (checked below),
   // L sentinel is a huge value so any bar low replaces it, OR30 fields 0.
   for(int i = 0; i < numDays; i++)
   {
      DayMap[i].Asian.H  = -1; DayMap[i].Asian.L  = 999999;
      DayMap[i].Asian.Volume = 0; DayMap[i].Asian.OR30_H = 0;
      DayMap[i].Asian.OR30_L = 0; DayMap[i].Asian.OR30_Vol = 0;
      DayMap[i].Asian.valid = false;
      DayMap[i].London.H = -1; DayMap[i].London.L = 999999;
      DayMap[i].London.Volume = 0; DayMap[i].London.OR30_H = 0;
      DayMap[i].London.OR30_L = 0; DayMap[i].London.OR30_Vol = 0;
      DayMap[i].London.valid = false;
      DayMap[i].NY.H     = -1; DayMap[i].NY.L     = 999999;
      DayMap[i].NY.Volume = 0; DayMap[i].NY.OR30_H = 0;
      DayMap[i].NY.OR30_L = 0; DayMap[i].NY.OR30_Vol = 0;
      DayMap[i].NY.valid = false;
      DayMap[i].Profile.valid = false;
   }

   ArrayResize(VWAPBuffer, total);

   // [reconstructed] VWAP accumulators (price*vol, vol, price^2*vol) per anchor.
   double sumPV_d = 0, sumV_d = 0, sumSqPV_d = 0;
   double sumPV_w = 0, sumV_w = 0, sumSqPV_w = 0;
   double sumPV_m = 0, sumV_m = 0, sumSqPV_m = 0;
   int currDay = -1, currWeek = -1, currMonth = -1;

   // Iterate BACKWARDS through the series array = oldest -> newest in time.
   for(int i = total - 1; i >= 0; i--)
   {
      MqlDateTime dt;
      TimeToStruct(rates[i].time, dt);
      int dayIdx = GetDayIndex(rates[i].time);

      // --- VWAP Logic ---
      // Daily reset on day change.
      if(dt.day_of_year != currDay)
      {
         sumPV_d = 0; sumV_d = 0; sumSqPV_d = 0;
         currDay = dt.day_of_year;
      }
      // Weekly reset: crude epoch-week index avoids day-of-week edge cases.
      int weekNum = (int)(rates[i].time / 604800);
      if(weekNum != currWeek)
      {
         sumPV_w = 0; sumV_w = 0; sumSqPV_w = 0;
         currWeek = weekNum;
      }
      // Monthly reset.
      if(dt.mon != currMonth)
      {
         sumPV_m = 0; sumV_m = 0; sumSqPV_m = 0;
         currMonth = dt.mon;
      }

      // VWAP uses typical price (H+L+C)/3.
      double typPrice = (rates[i].high + rates[i].low + rates[i].close) / 3.0;
      double vol = (double)rates[i].tick_volume;

      // StdDev around VWAP: sqrt( Sum(V*P^2)/SumV - VWAP^2 ).
      sumPV_d += typPrice * vol; sumV_d += vol; sumSqPV_d += vol * typPrice * typPrice;
      double vwap_d = (sumV_d > 0) ? sumPV_d / sumV_d : typPrice;
      double var_d  = (sumV_d > 0) ? (sumSqPV_d / sumV_d) - (vwap_d * vwap_d) : 0;

      sumPV_w += typPrice * vol; sumV_w += vol; sumSqPV_w += vol * typPrice * typPrice;
      double vwap_w = (sumV_w > 0) ? sumPV_w / sumV_w : typPrice;
      double var_w  = (sumV_w > 0) ? (sumSqPV_w / sumV_w) - (vwap_w * vwap_w) : 0;

      sumPV_m += typPrice * vol; sumV_m += vol; sumSqPV_m += vol * typPrice * typPrice;
      double vwap_m = (sumV_m > 0) ? sumPV_m / sumV_m : typPrice;
      double var_m  = (sumV_m > 0) ? (sumSqPV_m / sumV_m) - (vwap_m * vwap_m) : 0;

      VWAPBuffer[i].vwap_d = vwap_d; VWAPBuffer[i].std_d = MathSqrt(MathMax(0, var_d));
      VWAPBuffer[i].vwap_w = vwap_w; VWAPBuffer[i].std_w = MathSqrt(MathMax(0, var_w));
      VWAPBuffer[i].vwap_m = vwap_m; VWAPBuffer[i].std_m = MathSqrt(MathMax(0, var_m));

      // --- Session Logic: accumulate TODAY's stats into DayMap[dayIdx] ---
      int h = dt.hour;
      int m = dt.min;

      // Asian
      if(h >= InpAsianStart && h < InpAsianEnd)
      {
         if(DayMap[dayIdx].Asian.H == -1 || rates[i].high > DayMap[dayIdx].Asian.H)
            DayMap[dayIdx].Asian.H = rates[i].high;
         if(rates[i].low < DayMap[dayIdx].Asian.L)
            DayMap[dayIdx].Asian.L = rates[i].low;
         DayMap[dayIdx].Asian.Volume += vol;
         DayMap[dayIdx].Asian.valid = true;
         // OR30 check (first 30 minutes of the session).
         int sessionMin = (h - InpAsianStart) * 60 + m;
         if(sessionMin < 30)
         {
            if(DayMap[dayIdx].Asian.OR30_H == 0 || rates[i].high > DayMap[dayIdx].Asian.OR30_H)
               DayMap[dayIdx].Asian.OR30_H = rates[i].high;
            // FIX: original duplicated the "==0" clause; one sentinel check suffices.
            if(DayMap[dayIdx].Asian.OR30_L == 0 || rates[i].low < DayMap[dayIdx].Asian.OR30_L)
               DayMap[dayIdx].Asian.OR30_L = rates[i].low;
            DayMap[dayIdx].Asian.OR30_Vol += vol;
         }
      }

      // London
      if(h >= InpLondonStart && h < InpLondonEnd)
      {
         if(DayMap[dayIdx].London.H == -1 || rates[i].high > DayMap[dayIdx].London.H)
            DayMap[dayIdx].London.H = rates[i].high;
         if(rates[i].low < DayMap[dayIdx].London.L)
            DayMap[dayIdx].London.L = rates[i].low;
         DayMap[dayIdx].London.Volume += vol;
         DayMap[dayIdx].London.valid = true;
         int sessionMin = (h - InpLondonStart) * 60 + m;
         if(sessionMin < 30)
         {
            if(DayMap[dayIdx].London.OR30_H == 0 || rates[i].high > DayMap[dayIdx].London.OR30_H)
               DayMap[dayIdx].London.OR30_H = rates[i].high;
            if(DayMap[dayIdx].London.OR30_L == 0 || rates[i].low < DayMap[dayIdx].London.OR30_L)
               DayMap[dayIdx].London.OR30_L = rates[i].low;
            DayMap[dayIdx].London.OR30_Vol += vol;
         }
      }

      // NY
      if(h >= InpNYStart && h < InpNYEnd)
      {
         if(DayMap[dayIdx].NY.H == -1 || rates[i].high > DayMap[dayIdx].NY.H)
            DayMap[dayIdx].NY.H = rates[i].high;
         if(rates[i].low < DayMap[dayIdx].NY.L)
            DayMap[dayIdx].NY.L = rates[i].low;
         DayMap[dayIdx].NY.Volume += vol;
         DayMap[dayIdx].NY.valid = true;
         int sessionMin = (h - InpNYStart) * 60 + m;
         if(sessionMin < 30)
         {
            if(DayMap[dayIdx].NY.OR30_H == 0 || rates[i].high > DayMap[dayIdx].NY.OR30_H)
               DayMap[dayIdx].NY.OR30_H = rates[i].high;
            if(DayMap[dayIdx].NY.OR30_L == 0 || rates[i].low < DayMap[dayIdx].NY.OR30_L)
               DayMap[dayIdx].NY.OR30_L = rates[i].low;
            DayMap[dayIdx].NY.OR30_Vol += vol;
         }
      }
   }

   // Post-pass: per-day market profile + session-volume lists for averages.
   double list_AsianOR[],  list_AsianTot[];
   double list_LondonOR[], list_LondonTot[];
   double list_NYOR[],     list_NYTot[];

   // Identify day boundaries in rates for fast profile calc.
   int dayStartIdx   = total - 1;                      // oldest bar of current day
   int currentDayIdx = GetDayIndex(rates[total - 1].time);
   for(int i = total - 1; i >= -1; i--)                // scan to -1 to close last day
   {
      int d = (i >= 0) ? GetDayIndex(rates[i].time) : -1;
      if(d != currentDayIdx)
      {
         // Process completed day 'currentDayIdx'. Indices decrease toward the
         // future, so the day spans [i+1 .. dayStartIdx]; CalcProfile takes an
         // exclusive end.
         if(currentDayIdx >= 0 && currentDayIdx < numDays)
         {
            int idxStart = i + 1;
            int idxEnd   = dayStartIdx + 1;
            // [reconstructed] the CalcProfile call and volume collection were
            // lost in corruption; this matches the surviving comments.
            CalcProfile(rates, idxStart, idxEnd, DayMap[currentDayIdx].Profile);

            if(DayMap[currentDayIdx].Asian.valid)
            {
               AppendD(list_AsianOR,  DayMap[currentDayIdx].Asian.OR30_Vol);
               AppendD(list_AsianTot, DayMap[currentDayIdx].Asian.Volume);
            }
            if(DayMap[currentDayIdx].London.valid)
            {
               AppendD(list_LondonOR,  DayMap[currentDayIdx].London.OR30_Vol);
               AppendD(list_LondonTot, DayMap[currentDayIdx].London.Volume);
            }
            if(DayMap[currentDayIdx].NY.valid)
            {
               AppendD(list_NYOR,  DayMap[currentDayIdx].NY.OR30_Vol);
               AppendD(list_NYTot, DayMap[currentDayIdx].NY.Volume);
            }
         }
         currentDayIdx = d;
         dayStartIdx   = i;
      }
   }

   // NOTE(review): surviving comments discuss a 20-day rolling average but
   // settle on a global average as a deliberate simplification — kept as-is.
   AvgVol_Asian[0]  = MathMean(list_AsianOR);  AvgVol_Asian[1]  = MathMean(list_AsianTot);
   AvgVol_London[0] = MathMean(list_LondonOR); AvgVol_London[1] = MathMean(list_LondonTot);
   AvgVol_NY[0]     = MathMean(list_NYOR);     AvgVol_NY[1]     = MathMean(list_NYTot);
   Print("Averages Calculated. Asian Full: ", AvgVol_Asian[1],
         " London Full: ", AvgVol_London[1]);
}

// Arithmetic mean; returns 1 (not 0) for an empty array so that callers can
// divide by the result safely when normalizing volumes.
double MathMean(double &arr[])
{
   int n = ArraySize(arr);
   if(n == 0) return 1;
   double s = 0;
   for(int i = 0; i < n; i++) s += arr[i];   // [reconstructed] loop tail lost
   return s / n;
}

//+------------------------------------------------------------------+
//| Script entry point: build features, label samples, train the NN. |
//+------------------------------------------------------------------+
void OnStart()   // [reconstructed] function header lost in corruption
{
   // Feature layout per bar (see surviving comment: 9+9+3+2+5+21 = 49):
   //  0-17  session stats (Asian/London/NY: OR30 H/L/vol, session H/L/vol)
   // 18-20  EMA(50/200) distances
   // 21-22  time of day
   // 23-27  previous-day market profile (POC/VAH/VAL/crest/trough)
   // 28-48  VWAP daily/weekly/monthly (7 inputs each)
   int featuresPerBar = 49;

   // Topology: input layer = lookback * features, hidden from config, 1 output.
   string result[];
   int hiddenLayerCount = StringSplit(InpHiddenLayerConfig, ',', result);
   ArrayResize(GlobalTopology, 1 + hiddenLayerCount + 1);
   GlobalTopology[0] = InpLookback * featuresPerBar;
   // [reconstructed] hidden/output layer fill and setup lost in corruption.
   for(int i = 0; i < hiddenLayerCount; i++)
      GlobalTopology[i + 1] = (int)StringToInteger(result[i]);
   GlobalTopology[hiddenLayerCount + 1] = 1;

   // TODO(review): CNeuralNet constructor arguments lost — verify against the
   // class header (topology + learning rate + L2 lambda assumed).
   Net = new CNeuralNet(GlobalTopology, InpTrainingRate, InpLambda);

   handleATR_D1 = iATR(_Symbol, PERIOD_D1, 14);
   handleEMA50  = iMA(_Symbol, PERIOD_CURRENT, 50,  0, MODE_EMA, PRICE_CLOSE);
   handleEMA200 = iMA(_Symbol, PERIOD_CURRENT, 200, 0, MODE_EMA, PRICE_CLOSE);

   MqlRates rates[];
   ArraySetAsSeries(rates, true);
   int want = InpMaxBars + InpLookback + InpPredictionHorizon + 10;
   int copied = CopyRates(_Symbol, PERIOD_CURRENT, 0, want, rates);
   if(copied <= InpLookback + InpPredictionHorizon)
   {
      Print("Not enough history: ", copied);
      delete Net; Net = NULL;
      return;
   }

   double ema50[], ema200[];
   ArraySetAsSeries(ema50, true);
   ArraySetAsSeries(ema200, true);
   if(CopyBuffer(handleEMA50, 0, 0, copied, ema50) <= 0 ||
      CopyBuffer(handleEMA200, 0, 0, copied, ema200) <= 0)
   {
      Print("Failed to copy EMA buffers");
      delete Net; Net = NULL;
      return;
   }

   PrecomputeData(rates, copied);

   // Sample range: leave room for the forward horizon (small indices = future)
   // and the lookback window (large indices = past).
   int startIdx = InpPredictionHorizon + 1;
   int endIdx   = copied - InpLookback - 2;
   if(endIdx - startIdx > InpMaxBars) endIdx = startIdx + InpMaxBars;

   Sample samples[];
   Print("Starting Sample Collection...");
   for(int i = startIdx; i <= endIdx; i++)
   {
      MqlDateTime dt;
      TimeToStruct(rates[i].time, dt);
      //if(dt.min % 20 != 0) continue;   // optional 20-min stride (disabled)

      // --- Labeling: first-touch TP/SL race over the prediction horizon ---
      double d1ATR_Arr[1];
      if(CopyBuffer(handleATR_D1, 0, rates[i].time, 1, d1ATR_Arr) <= 0) continue;
      double dailyATR  = d1ATR_Arr[0];
      double targetTP  = 1.0 * dailyATR;
      double targetSL  = 0.5 * dailyATR;
      double entryPrice = rates[i].close;
      double label = 0;
      int firstBuyTP = 99999, firstBuySL = 99999;
      int firstSellTP = 99999, firstSellSL = 99999;
      for(int k = 1; k <= InpPredictionHorizon; k++)
      {
         int f = i - k;            // series array: smaller index = future bar
         if(f < 0) break;
         double h = rates[f].high;
         double l = rates[f].low;
         if(firstBuyTP  == 99999 && h >= entryPrice + targetTP) firstBuyTP  = k;
         if(firstBuySL  == 99999 && l <= entryPrice - targetSL) firstBuySL  = k;
         if(firstSellTP == 99999 && l <= entryPrice - targetTP) firstSellTP = k;
         if(firstSellSL == 99999 && h >= entryPrice + targetSL) firstSellSL = k;
      }
      if(firstBuyTP < firstBuySL)        label =  1.0;   // Buy wins
      else if(firstSellTP < firstSellSL) label = -1.0;   // Sell wins
      else                               label =  0.0;

      // --- Feature Construction over the lookback window ---
      double inputs[];
      ArrayResize(inputs, InpLookback * featuresPerBar);
      bool windowOk = true;
      for(int k = 0; k < InpLookback; k++)   // [reconstructed] loop head lost
      {
         int p = i + k;                      // older bars have larger indices
         if(p >= copied) { windowOk = false; break; }
         int off = k * featuresPerBar;
         double c = rates[p].close;

         // 1. Session stats. DayMap holds FULL-day stats; to avoid lookahead we
         // only read a session from today once its end hour has passed,
         // otherwise we fall back to yesterday's session.
         int dayIdx = GetDayIndex(rates[p].time);
         MqlDateTime pDt;
         TimeToStruct(rates[p].time, pDt);
         int pHour = pDt.hour;
         int idxA = (pHour >= InpAsianEnd)  ? dayIdx : dayIdx - 1;
         int idxL = (pHour >= InpLondonEnd) ? dayIdx : dayIdx - 1;
         int idxN = (pHour >= InpNYEnd)     ? dayIdx : dayIdx - 1;

         double aH=c, aL=c, aVol=0, aORH=c, aORL=c, aORVol=0;
         if(idxA >= 0 && DayMap[idxA].Asian.valid)
         {
            aH = DayMap[idxA].Asian.H;         aL = DayMap[idxA].Asian.L;
            aVol = DayMap[idxA].Asian.Volume;
            aORH = DayMap[idxA].Asian.OR30_H;  aORL = DayMap[idxA].Asian.OR30_L;
            aORVol = DayMap[idxA].Asian.OR30_Vol;
         }
         double lH=c, lL=c, lVol=0, lORH=c, lORL=c, lORVol=0;
         if(idxL >= 0 && DayMap[idxL].London.valid)
         {
            lH = DayMap[idxL].London.H;        lL = DayMap[idxL].London.L;
            lVol = DayMap[idxL].London.Volume;
            lORH = DayMap[idxL].London.OR30_H; lORL = DayMap[idxL].London.OR30_L;
            lORVol = DayMap[idxL].London.OR30_Vol;
         }
         double nH=c, nL=c, nVol=0, nORH=c, nORL=c, nORVol=0;
         if(idxN >= 0 && DayMap[idxN].NY.valid)
         {
            nH = DayMap[idxN].NY.H;            nL = DayMap[idxN].NY.L;
            nVol = DayMap[idxN].NY.Volume;
            nORH = DayMap[idxN].NY.OR30_H;     nORL = DayMap[idxN].NY.OR30_L;
            nORVol = DayMap[idxN].NY.OR30_Vol;
         }

         // Price distances relative to close, scaled; volumes normalized by
         // the global session averages.
         inputs[off+0]  = (c - aORH)/c * 1000;
         inputs[off+1]  = (c - aORL)/c * 1000;
         inputs[off+2]  = (AvgVol_Asian[0]  > 0) ? (aORVol / AvgVol_Asian[0])  : 0;
         inputs[off+3]  = (c - aH)/c * 1000;
         inputs[off+4]  = (c - aL)/c * 1000;
         inputs[off+5]  = (AvgVol_Asian[1]  > 0) ? (aVol   / AvgVol_Asian[1])  : 0;
         inputs[off+6]  = (c - lORH)/c * 1000;
         inputs[off+7]  = (c - lORL)/c * 1000;
         inputs[off+8]  = (AvgVol_London[0] > 0) ? (lORVol / AvgVol_London[0]) : 0;
         inputs[off+9]  = (c - lH)/c * 1000;
         inputs[off+10] = (c - lL)/c * 1000;
         inputs[off+11] = (AvgVol_London[1] > 0) ? (lVol   / AvgVol_London[1]) : 0;
         inputs[off+12] = (c - nORH)/c * 1000;
         inputs[off+13] = (c - nORL)/c * 1000;
         inputs[off+14] = (AvgVol_NY[0]     > 0) ? (nORVol / AvgVol_NY[0])     : 0;
         inputs[off+15] = (c - nH)/c * 1000;
         inputs[off+16] = (c - nL)/c * 1000;
         inputs[off+17] = (AvgVol_NY[1]     > 0) ? (nVol   / AvgVol_NY[1])     : 0;

         // 2. MA distances.
         inputs[off+18] = (c - ema50[p])  / ema50[p]  * 1000.0;
         inputs[off+19] = (c - ema200[p]) / ema200[p] * 1000.0;
         inputs[off+20] = (ema50[p] - ema200[p]) / ema200[p] * 1000.0;

         // 3. Time of day.
         inputs[off+21] = (double)pDt.hour / 24.0;
         inputs[off+22] = (double)(pDt.min / 15) / 4.0;

         // 4. Market Profile of the PREVIOUS day (no lookahead).
         int mpIdx = dayIdx - 1;
         double poc=c, vah=c, val=c;
         double dCrest=0, dTrough=0;    // signed distance to nearest node
         if(mpIdx >= 0 && DayMap[mpIdx].Profile.valid)
         {
            poc = DayMap[mpIdx].Profile.POC;
            vah = DayMap[mpIdx].Profile.VAH;
            val = DayMap[mpIdx].Profile.VAL;
            // [reconstructed] nearest crest/trough search lost in corruption.
            double minD = 999999;
            for(int cx = 0; cx < ArraySize(DayMap[mpIdx].Profile.Crests); cx++)
            {
               double dd = MathAbs(c - DayMap[mpIdx].Profile.Crests[cx]);
               if(dd < minD) { minD = dd; dCrest = c - DayMap[mpIdx].Profile.Crests[cx]; }
            }
            minD = 999999;
            for(int tx = 0; tx < ArraySize(DayMap[mpIdx].Profile.Troughs); tx++)
            {
               double dd = MathAbs(c - DayMap[mpIdx].Profile.Troughs[tx]);
               if(dd < minD) { minD = dd; dTrough = c - DayMap[mpIdx].Profile.Troughs[tx]; }
            }
         }
         inputs[off+23] = (c - poc)/c * 1000;
         inputs[off+24] = (c - vah)/c * 1000;
         inputs[off+25] = (c - val)/c * 1000;
         inputs[off+26] = dCrest /c * 1000;
         inputs[off+27] = dTrough/c * 1000;

         // 5. VWAP: 7 inputs per timeframe (z-score, distances to VWAP and the
         // +/-1 and +/-2 sigma bands, band width).
         // TODO(review): the exact 7-feature layout was lost — verify.
         double tf_vwap[3], tf_std[3];
         tf_vwap[0] = VWAPBuffer[p].vwap_d; tf_std[0] = VWAPBuffer[p].std_d;
         tf_vwap[1] = VWAPBuffer[p].vwap_w; tf_std[1] = VWAPBuffer[p].std_w;
         tf_vwap[2] = VWAPBuffer[p].vwap_m; tf_std[2] = VWAPBuffer[p].std_m;
         for(int tf = 0; tf < 3; tf++)
         {
            int vo = off + 28 + tf * 7;
            double vw = tf_vwap[tf], sd = tf_std[tf];
            inputs[vo+0] = (sd > 0) ? (c - vw) / sd : 0;
            inputs[vo+1] = (c - vw)/c * 1000;
            inputs[vo+2] = (c - (vw + sd))/c * 1000;
            inputs[vo+3] = (c - (vw - sd))/c * 1000;
            inputs[vo+4] = (c - (vw + 2*sd))/c * 1000;
            inputs[vo+5] = (c - (vw - 2*sd))/c * 1000;
            inputs[vo+6] = sd/c * 1000;
         }
      }
      if(!windowOk) continue;

      // Store the sample.
      int s = ArraySize(samples);
      ArrayResize(samples, s + 1);
      ArrayResize(samples[s].inputs, ArraySize(inputs));
      ArrayCopy(samples[s].inputs, inputs);
      samples[s].target = label;
   }

   // Class distribution.
   int pos = 0, neg = 0, flat = 0;
   for(int k = 0; k < ArraySize(samples); k++)
   {
      double label = samples[k].target;
      if(label > 0.1) pos++;
      else if(label < -0.1) neg++;
      else flat++;
   }
   PrintFormat("Collected: Pos=%d Neg=%d Flat=%d", pos, neg, flat);
   if(pos == 0 && neg == 0)
   {
      Print("No valid samples!");
      delete Net;
      IndicatorRelease(handleATR_D1);
      IndicatorRelease(handleEMA50);
      IndicatorRelease(handleEMA200);
      return;
   }

   // Balancing: keep all directional samples, downsample flats.
   // [reconstructed] balancing/shuffle/split bodies lost in corruption.
   int targetCount = (pos + neg) / 2;
   Sample balanced[];
   int flatsKept = 0;
   for(int k = 0; k < ArraySize(samples); k++)
   {
      bool isFlat = MathAbs(samples[k].target) <= 0.1;
      if(isFlat && flatsKept >= targetCount) continue;
      if(isFlat) flatsKept++;
      int s = ArraySize(balanced);
      ArrayResize(balanced, s + 1);
      ArrayResize(balanced[s].inputs, ArraySize(samples[k].inputs));
      ArrayCopy(balanced[s].inputs, samples[k].inputs);
      balanced[s].target = samples[k].target;
   }

   // Fisher-Yates shuffle of index order.
   int n = ArraySize(balanced);
   int indices[];
   ArrayResize(indices, n);
   for(int k = 0; k < n; k++) indices[k] = k;
   for(int k = n - 1; k > 0; k--)
   {
      int r = MathRand() % (k + 1);
      int t = indices[k]; indices[k] = indices[r]; indices[r] = t;
   }

   // 80/20 train/validation split.
   int valStart = (int)(n * 0.8);
   if(valStart < 1) valStart = 1;
   double bestValAcc = 0;
   int stuckCount = 0;

   for(int e = 0; e < InpEpochs; e++)
   {
      // TODO(review): training-call name lost — Train(inputs, target) assumed.
      for(int k = 0; k < valStart; k++)
      {
         int v = indices[k];
         Net.Train(balanced[v].inputs, balanced[v].target);
      }

      // Validation: directional accuracy on non-flat samples only.
      int corr = 0, tot = 0;
      for(int k = valStart; k < n; k++)
      {
         int v = indices[k];
         double res[];
         Net.FeedForward(balanced[v].inputs, res);   // TODO(review): verify API
         if(MathAbs(balanced[v].target) > 0.1)
         {
            tot++;
            if(balanced[v].target * res[0] > 0) corr++;
         }
      }
      double valAcc = (tot > 0) ? (double)corr / tot * 100 : 0;
      if(e % 10 == 0 || e == InpEpochs - 1)
      {
         PrintFormat("Epoch %d Val: %.1f%% (Best: %.1f%%) - LR: %.6f",
                     e, valAcc, bestValAcc, Net.eta);
      }

      // "Best model" persistence with a small improvement threshold.
      if(valAcc > bestValAcc + 0.05)
      {
         bestValAcc = valAcc;
         SaveModelWithHeader("AdvancedPriceActionNN.bin");
         PrintFormat("--- NEW BEST MODEL SAVED! Accuracy: %.2f%%", bestValAcc);
         stuckCount = 0;
      }
      else
      {
         stuckCount++;
      }

      // No improvement for 30 epochs -> nudge weights to escape the plateau.
      if(stuckCount >= 30 && InpJitter > 0)
      {
         PrintFormat("STUCK DETECTED! Nudging weights with magnitude %.3f...", InpJitter);
         Net.Jitter(InpJitter);
         stuckCount = 0;
         Net.SetLearningRate(Net.eta * 1.5);   // temporary boost to help escape
      }
      Net.SetLearningRate(Net.eta * InpTrainingDecay);
   }

   Print("TRAINING FINISHED.");
   SaveModelWithHeader("AdvancedPriceActionNN.bin");
   delete Net;
   Net = NULL;
   IndicatorRelease(handleATR_D1);
   IndicatorRelease(handleEMA50);
   IndicatorRelease(handleEMA200);
}

//+------------------------------------------------------------------+
//| Save current weights if the script is interrupted mid-training.  |
//+------------------------------------------------------------------+
void OnDeinit(const int reason)
{
   if(CheckPointer(Net) == POINTER_DYNAMIC)
   {
      Print("Script stopped/interrupted. Saving current weights...");
      SaveModelWithHeader("AdvancedPriceActionNN.bin");
      delete Net;
      Net = NULL;
   }
   IndicatorRelease(handleATR_D1);
   IndicatorRelease(handleEMA50);
   IndicatorRelease(handleEMA200);
}

//+------------------------------------------------------------------+
//| Serialize model with a metadata header:                          |
//|   int version (2), int lookback, int topologySize, topology[...] |
//| followed by the network weights.                                 |
//+------------------------------------------------------------------+
void SaveModelWithHeader(string filename)
{
   int handle = FileOpen(filename, FILE_WRITE|FILE_BIN|FILE_COMMON);
   if(handle == INVALID_HANDLE)
   {
      Print("Error opening file for write: ", filename);
      return;
   }
   int version = 2;   // version 2 supports metadata
   FileWriteInteger(handle, version);
   FileWriteInteger(handle, InpLookback);
   FileWriteInteger(handle, ArraySize(GlobalTopology));
   for(int i = 0; i < ArraySize(GlobalTopology); i++)
      FileWriteInteger(handle, GlobalTopology[i]);
   // TODO(review): source was truncated here — the weight-serialization call
   // below is assumed; verify against CNeuralNet's API.
   Net.Save(handle);
   FileClose(handle);
}