//+------------------------------------------------------------------+
//|                                                          WNN.mqh |
//|                                      Santiago Cruz, AlgoNet Inc. |
//|                       https://www.mql5.com/en/users/algo-trader/ |
//+------------------------------------------------------------------+
#property copyright "Santiago Cruz, AlgoNet Inc."
#property link      "https://www.mql5.com/en/users/algo-trader/"

#include <Math\Stat\Normal.mqh>   // MathRandomNormal(), used by MatrixRandom()

//--- WNN (Wavelet Neural Network)
class WNN
  {
protected:
   int               m_deep;        // number of hidden neurons
   int               m_deepth;      // history depth (input size)
   string            m_Symbol;
   double            close[];
   matrix            m_input;       // training input (1 x m_deepth)
   matrix            m_pred_input;  // prediction input (1 x m_deepth)
   ENUM_TIMEFRAMES   m_TF;
   matrix            m_z_2;
   matrix            m_a_2;
   matrix            m_z_3;
   matrix            m_yHat;        // network output (1 x 1)
   double            y_cor;         // training target
   double            m_alpha;       // cost threshold that stops training

public:
   matrix            W_1;
   matrix            W_2;
   matrix            W_1_LSTM;

   //--- WNN Constructor
   //--- The constructor WNN initializes the neural network with parameters like the symbol,
   //--- timeframe, history depth, number of neurons, and the cost threshold (alpha) at which training stops.
                     WNN(string Symbol_, ENUM_TIMEFRAMES TimeFrame, int History_Depth, int Number_of_Neurons, double alpha);
   //--- These functions implement the sigmoid activation function and its derivative, respectively.
   double            Sigmoid(double x);
   double            Sigmoid_Prime(double x);
   //--- This function returns the sign of a value, either +1 or -1.
   int               Sgn(double Value);
   //--- This function initializes a matrix with random values drawn from a normal distribution.
   void              MatrixRandom(matrix &m);
   //--- These functions apply the sigmoid function and its derivative to each element of a matrix, respectively.
   matrix            MatrixSigmoid(matrix &m);
   matrix            MatrixSigmoidPrime(matrix &m);
   matrix            Forward_Prop();
   double            Cost();
   void              UpdateValues(int shift);
   void              Train(int shift);
   double            Prediction();
  };

//--- Constructor: stores the parameters, randomly initializes the weight matrices
//--- and allocates the price buffer and the output matrix.
WNN::WNN(string Symbol_,ENUM_TIMEFRAMES TimeFrame,int History_Depth,int Number_of_Neurons,double alpha)
  {
   m_Symbol = Symbol_;
   m_deepth = History_Depth;
   m_deep   = Number_of_Neurons;
   m_TF     = TimeFrame;
   m_alpha  = alpha;

   matrix random_LSTM(1,m_deep);
   matrix random_W1(m_deepth,m_deep);
   matrix random_W2(m_deep,1);
   MatrixRandom(random_W1);
   MatrixRandom(random_W2);
   MatrixRandom(random_LSTM);
   W_1      = random_W1;
   W_2      = random_W2;
   W_1_LSTM = random_LSTM;

   ArrayResize(close,m_deepth+5,0);

   m_yHat.Init(1,1);
   m_yHat[0][0] = 0;
   y_cor        = -1;
  }

//--- Runs a forward pass over the prediction input and returns the forecast.
double WNN::Prediction(void)
  {
   matrix pred_z_2   = m_pred_input.MatMul(W_1) + W_1_LSTM;
   matrix pred_a_2   = MatrixSigmoid(pred_z_2);
   matrix pred_z_3   = pred_a_2.MatMul(W_2);
   matrix pred_yHat  = MatrixSigmoid(pred_z_3);
   return pred_yHat[0][0];   // return the freshly computed forecast, not the last training output
  }

//--- Trains the network on the data at the given bar shift until the cost
//--- drops below m_alpha (the loop has no iteration cap, so a very small
//--- alpha can keep it running for a long time).
void WNN::Train(int shift)
  {
   bool Train_condition = true;
   UpdateValues(shift);

   while(Train_condition)
     {
      m_yHat   = Forward_Prop();
      double J = Cost();
      if(J < m_alpha)
        {
         Train_condition = false;
        }

      //--- backpropagation of the cost gradient to both weight matrices
      matrix X_m_matrix = {{y_cor}};
      matrix cost       = -1*(X_m_matrix - m_yHat);
      matrix z_3_prime  = MatrixSigmoidPrime(m_z_3);
      matrix delta3     = cost.MatMul(z_3_prime);
      matrix dJdW2      = m_a_2.Transpose().MatMul(delta3);
      matrix z_2_prime  = MatrixSigmoidPrime(m_z_2);
      matrix delta2     = delta3.MatMul(W_2.Transpose())*z_2_prime;
      matrix dJdW1      = m_input.Transpose().MatMul(delta2);

      W_1 = W_1 - dJdW1;
      W_2 = W_2 - dJdW2;
     }

   //--- store the hidden-layer pre-activation of the last training input;
   //--- it is added as a recurrent term on the next forward pass
   W_1_LSTM = m_input.MatMul(W_1);
  }

//--- Refreshes the price series for the given bar shift and rebuilds the
//--- network inputs and the training target.
void WNN::UpdateValues(int shift)
  {
   //--- NOTE: the body of this method is an assumed reconstruction: the close[]
   //--- series is refreshed from the terminal and the inputs/target are rebuilt
   //--- with a simple direction-of-move encoding. Adapt it to your own data preparation.
   for(int i=0 ; i<m_deepth+5 ; i++)
     {
      close[i] = iClose(m_Symbol,m_TF,i+shift);   // most recent bar first
     }

   m_input.Init(1,m_deepth);
   m_pred_input.Init(1,m_deepth);
   for(int i=0; i<m_deepth; i++)
     {
      m_input[0][i]      = Sgn(close[i+1]-close[i+2]);  // training window, one bar older
      m_pred_input[0][i] = Sgn(close[i]-close[i+1]);    // prediction window
     }
   //--- target: direction of the last completed move, mapped to {0,1} so it is
   //--- comparable with the sigmoid output
   y_cor = (close[0] > close[1]) ? 1.0 : 0.0;
  }

//--- Returns the sign of a value, either +1 or -1.
int WNN::Sgn(double Value)
  {
   int RES;
   if(Value > 0)
     {
      RES = 1;
     }
   else
     {
      RES = -1;
     }
   return RES;
  }

//--- This function initializes each element of the matrix m with random values drawn
//--- from a normal distribution with mean 0 and standard deviation 1.
void WNN::MatrixRandom(matrix &m)
  {
   int error;
   for(ulong r=0; r<m.Rows(); r++)
     {
      for(ulong c=0; c<m.Cols(); c++)
        {
         m[r][c] = MathRandomNormal(0,1,error);   // N(0,1) sample from <Math\Stat\Normal.mqh>
        }
     }
  }
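
//--- The four activation helpers are declared in the class but not defined in this
//--- listing; the minimal sketch below matches the declaration comments (standard
//--- logistic sigmoid, its derivative, and their element-wise matrix versions).
double WNN::Sigmoid(double x)
  {
   return 1.0/(1.0+MathExp(-x));
  }

double WNN::Sigmoid_Prime(double x)
  {
   double s = Sigmoid(x);
   return s*(1.0-s);
  }

matrix WNN::MatrixSigmoid(matrix &m)
  {
   matrix res(m.Rows(),m.Cols());
   for(ulong r=0; r<m.Rows(); r++)
      for(ulong c=0; c<m.Cols(); c++)
         res[r][c] = Sigmoid(m[r][c]);
   return res;
  }

matrix WNN::MatrixSigmoidPrime(matrix &m)
  {
   matrix res(m.Rows(),m.Cols());
   for(ulong r=0; r<m.Rows(); r++)
      for(ulong c=0; c<m.Cols(); c++)
         res[r][c] = Sigmoid_Prime(m[r][c]);
   return res;
  }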
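
//--- Forward_Prop() and Cost() are likewise only declared above. The sketch below
//--- mirrors the forward pass used in Prediction() (applied to the training input
//--- m_input) and uses a plain squared-error cost, which is consistent with the
//--- -(y - yHat) gradient in Train(); the original definitions may differ.
matrix WNN::Forward_Prop(void)
  {
   m_z_2 = m_input.MatMul(W_1) + W_1_LSTM;   // hidden pre-activation plus recurrent term
   m_a_2 = MatrixSigmoid(m_z_2);             // hidden activation
   m_z_3 = m_a_2.MatMul(W_2);                // output pre-activation
   return MatrixSigmoid(m_z_3);              // network output (1 x 1)
  }

double WNN::Cost(void)
  {
   //--- squared error between the target y_cor and the current output
   return 0.5*MathPow(y_cor - m_yHat[0][0],2);
  }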
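
//+------------------------------------------------------------------+
//| Example usage (illustrative sketch only, kept inside a block     |
//| comment so the include stays clean): an Expert Advisor that      |
//| retrains the network once per new bar and reads the forecast.    |
//| The timeframe and the 20/10/0.05 parameters are placeholders,    |
//| not recommendations.                                             |
//+------------------------------------------------------------------+
/*
#include <WNN.mqh>

WNN *wnn = NULL;

int OnInit()
  {
   //--- symbol, timeframe, history depth, hidden neurons, cost threshold
   wnn = new WNN(_Symbol,PERIOD_H1,20,10,0.05);
   return(INIT_SUCCEEDED);
  }

void OnDeinit(const int reason)
  {
   delete wnn;
  }

void OnTick()
  {
   static datetime last_bar = 0;
   datetime bar = iTime(_Symbol,PERIOD_H1,0);
   if(bar == last_bar)
      return;                        // act only once per new bar
   last_bar = bar;

   wnn.Train(1);                     // fit on the last completed bars
   double p = wnn.Prediction();      // sigmoid output in (0,1)
   // p > 0.5 -> bullish bias, p < 0.5 -> bearish bias
  }
*/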