pnn_verify5
//+------------------------------------------------------------------+
//|               A probabilistic neural network (PNN) implementation |
//|                                                                   |
//|                                                           PNN.mq4 |
//|                                              Paco Hernández Gómez |
//|                                     http://www.hernandezgomez.com |
//+------------------------------------------------------------------+
#property copyright "Paco Hernández Gómez"
#property link "http://www.hernandezgomez.com"
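// Smoothing parameter (sigma) of the Gaussian kernel used when classifying vectors.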
double SIGMA = 1;
/**
* Probabilistic neural network data is stored in an array composed of the training vectors and their assigned classes.
*
* To optimize speed, the possible classes are numbered from 0 to n, and the class number is stored in the first
* position of each trained vector.
*
* pnn[x][0]    = Class to which the training vector is assigned (there are two possible classes: 0 - Buy, 1 - Sell).
* pnn[x][1..n] = Training vector components classified in class pnn[x][0].
*
* In this example, training vectors have 60 components.
*/
double pnn[0][61];
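/*
* Illustrative layout (values made up for this sketch): after two vectors have been
* trained, one per class, the array could look like this:
*
*   pnn[0][0] = 0;        // first training vector belongs to class 0 (Buy)
*   pnn[0][1..60]         // its 60 components
*   pnn[1][0] = 1;        // second training vector belongs to class 1 (Sell)
*   pnn[1][1..60]         // its 60 components
*/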
/**
* Initialize the PNN with zero training vectors.
*/
void PNNInit() {
ArrayResize(pnn, 0);
ArrayInitialize(pnn,NULL);
}
/**
* Add a training vector to the trained vectors set and classify it to a class.
*/
void PNNAddVector(int classIndex, double vector[]) {
// Create a new position in the array to store the new training vector and its associated class.
int length = ArrayRange(pnn, 0);
ArrayResize(pnn, length + 1);
// Store the new training vector class
pnn[length][0] = classIndex;
// Store the new training vector, zero-filling any components the input vector does not provide
int inputLength = ArrayRange(vector, 0);
for (int i = 0; i < ArrayRange(pnn, 1) - 1; i++) {
if (i < inputLength) pnn[length][i + 1] = vector[i];
else pnn[length][i + 1] = 0;
}
}
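/*
* Usage sketch (values are illustrative): a 60-component vector that should be
* classified as Buy (class 0) would be added with
*
*   double v[60];
*   // ... fill v[0..59] with the feature values ...
*   PNNAddVector(0, v);
*/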
/**
* Calculate the squared Euclidean distance between two vectors, needed in order to classify a vector.
*/
double euclideanScalarProduct(double p[], double q[]) {
double euclideanScalarProduct = 0;
int length = ArrayRange(p, 0);
for (int i = 0; i < length; i++) {
euclideanScalarProduct += MathPow(p[i] - q[i], 2);
}
return (euclideanScalarProduct);
}
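/*
* Worked example (illustrative values): for p = {1, 2} and q = {4, 6} the function
* returns (1 - 4)^2 + (2 - 6)^2 = 9 + 16 = 25, i.e. the squared Euclidean distance
* between the two points; despite its name it is not a dot product.
*/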
/**
* Classify a vector in one class.
*/
int PNNClassifyVector(double vector[]) {
int i;
int components = ArrayRange(pnn, 1) - 1; // components per training vector (60); index 0 holds the class
int totalVectors = ArrayRange(pnn, 0); // number of trained vectors
double result = -1; // class densities are never negative, so -1 is a safe minimum
int resultClass = -1;
double fx[2] = {0, 0};
double classVectorCount[2] = {0, 0};
// Accumulate each training vector's Gaussian kernel contribution to its class density.
for (i = 0; i < totalVectors; i++) {
int classIndex = pnn[i][0];
double classVector[60];
for (int j = 0; j < components; j++) {
classVector[j] = pnn[i][j + 1];
}
classVectorCount[classIndex]++;
fx[classIndex] += MathExp((-1) * euclideanScalarProduct(vector, classVector) / (2 * MathPow(SIGMA, 2)));
}
// Normalize each class density and pick the class with the highest value.
for (i = 0; i < ArrayRange(fx, 0); i++) {
if (classVectorCount[i] == 0) continue; // no training vectors for this class
fx[i] = fx[i] * (1 / MathPow(2 * 3.14159265, 0.5)) * (1 / classVectorCount[i]);
Print("fx[", i, "]=", fx[i], ", class is ", i, ", classVectorCount[", i, "]=", classVectorCount[i]);
if (fx[i] > result) {
result = fx[i];
resultClass = i;
}
}
return (resultClass);
}
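/*
* Sketch of what PNNClassifyVector() evaluates, as implemented here: a Parzen-window
* density estimate with a Gaussian kernel. For each class c with n_c training
* vectors t_i,
*
*   fx[c] = (1 / (sqrt(2 * Pi) * n_c)) * SUM_i exp(-||x - t_i||^2 / (2 * SIGMA^2))
*
* and the input vector x is assigned to the class with the largest fx[c].
*/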
/**
* Store the trained PNN in a file
*/
void PNNSave() {
int handle = FileOpen("pnn.dat", FILE_WRITE | FILE_BIN);
if (handle < 0) {
Print("PNNSave: could not open pnn.dat for writing, error ", GetLastError());
return;
}
int vectorSize = ArrayRange(pnn, 1);
for (int i = 0; i < ArraySize(pnn); i++) {
FileWriteDouble(handle, pnn[i / vectorSize][i % vectorSize]);
}
FileClose(handle);
}
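/*
* The resulting file is a flat sequence of 8-byte doubles, 61 per training vector
* (the class value followed by the 60 components), which is exactly the layout
* PNNLoad() below expects.
*/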
/**
* Load a trained PNN from a file
*/
void PNNLoad() {
int handle = FileOpen("pnn.dat", FILE_READ | FILE_BIN);
if (handle < 0) {
Print("PNNLoad: could not open pnn.dat for reading, error ", GetLastError());
return;
}
int fileSize = FileSize(handle); // file size in bytes
int totalDoubles = fileSize / 8; // each stored value is an 8-byte double
ArrayResize(pnn, totalDoubles / 61); // 61 doubles per training vector (class + 60 components)
int vectorSize = ArrayRange(pnn, 1);
for (int i = 0; i < totalDoubles; i++) {
pnn[i / vectorSize][i % vectorSize] = FileReadDouble(handle);
}
FileClose(handle);
}
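/*
* A minimal usage sketch (assumes "pnn.dat" already exists from a previous run):
* instead of starting empty, init() could restore the trained vectors with
*
*   int init() {
*       PNNLoad();
*       return(0);
*   }
*/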
int init() {
PNNInit();
return(0);
}
/**
* Train the PNN with two sample vectors and classify a test vector.
*/
int start() {
double buy_vector[5];
double sell_vector[3];
double vector[1];
buy_vector[0]=2;
buy_vector[1]=2.5;
buy_vector[2]=3;
buy_vector[3]=1;
buy_vector[4]=6;
sell_vector[0]=6;
sell_vector[1]=6.5;
sell_vector[2]=7;
vector[0]=3;
PNNAddVector(0, buy_vector);
PNNAddVector(1, sell_vector);
int classIndex = PNNClassifyVector(vector);
/* Comment( "\n Verify Comeze PNN ",
"\n input value",
"\n*=====================*",
"\n buy class = ", DoubleToStr(buy_vector[0],2),
" , ",DoubleToStr(buy_vector[1],2),
" , ", DoubleToStr(buy_vector[2],2),
" , ", DoubleToStr(buy_vector[3],2),
" , ", DoubleToStr(buy_vector[4],2),
"\n sell class = ", DoubleToStr(sell_vector[0],2),
" , ",DoubleToStr(sell_vector[1],2),
" , ", DoubleToStr(sell_vector[2],2),
"\n sigma = ", DoubleToStr(SIGMA,2),
"\n\n output classification",
"\n*=====================*",
"\n int class = ", class,
"\n\n*===============");
*/
/* Print("buy class = ", DoubleToStr(buy_vector[0],2),
" , ",DoubleToStr(buy_vector[1],2),
" , ", DoubleToStr(buy_vector[2],2),
" , ", DoubleToStr(buy_vector[3],2),
" , ", DoubleToStr(buy_vector[4],2));
Print("sell class = ", DoubleToStr(sell_vector[0],2),
" , ", DoubleToStr(sell_vector[1],2),
" , ", DoubleToStr(sell_vector[2],2)," sigma = ", DoubleToStr(SIGMA,2));*/
Print ("pnn = ", DoubleToStr(pnn[0][0],2),
" , ", DoubleToStr(pnn[0][1],2),
" , ", DoubleToStr(pnn[0][2],2),
" , ", DoubleToStr(pnn[0][3],2),
" , ", DoubleToStr(pnn[0][4],2),
" , ", DoubleToStr(pnn[0][5],2),
" , ", DoubleToStr(pnn[1][0],2)," , ", DoubleToStr(pnn[1][1],2)," , ", DoubleToStr(pnn[1][2],2)," , ", DoubleToStr(pnn[1][3],2));
return(0);
}
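/**
* A hypothetical helper (not part of the original script) sketching one way the
* 60-component training vectors described at the top of the file could be filled,
* here simply with the last 60 close prices of the current chart. The function name
* and the choice of features are assumptions; recent MQL4 builds require the array
* parameter to be declared as double &vector[].
*/
void PNNBuildPriceVector(double vector[]) {
for (int i = 0; i < 60; i++) {
vector[i] = Close[i]; // component i = close price of bar i (0 = current bar)
}
}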
/**
* Store the trained PNN in a file when the script is removed.
*/
int deinit() {
PNNSave();
return(0);
}