Miscellaneous
0
Views
0
Downloads
0
Favorites
pnn
//+------------------------------------------------------------------+
//| A probabilistic neural network (PNN) implementation |
//| |
//| PNN.mq4 |
//| Paco Hernández Gómez |
//| http://www.hernandezgomez.com |
//+------------------------------------------------------------------+
#property copyright "Paco Hernández Gómez"
#property link "http://www.hernandezgomez.com"
#property library
// Smoothing parameter (standard deviation) of the Gaussian kernel used by the
// PNN. Smaller values make each training vector's influence more local.
double SIGMA = 0.1;
/**
 * Probabilistic neural network data is stored in an array composed of the training vectors and their classified classes.
 *
 * To optimize speed, the possible classes are numbered from 0 to n, and are stored in the first position of each
 * trained vector.
 *
 * pnn[x][0] = Class where the training vector is classified (There are two possible classes (0 - Buy, 1 - Sell)).
 * pnn[x][1..n] = Training vector components classified in class pnn[x][0].
 *
 * In this example, training vectors are going to have 60 different components.
 */
double pnn[0][61];
/**
 * Reset the network: discard every stored training vector so the
 * pnn matrix again holds zero rows.
 */
void PNNInit() {
   ArrayResize(pnn, 0);
}
/**
 * Append a training vector to the trained set and label it with a class.
 *
 * Fix: the parameter was named "class", which is a reserved word in
 * MQL4 build 600+ (and in C/C++), so the library no longer compiled on
 * modern terminals; renamed to classId (parameter names are not part of
 * the call interface in MQL4, so callers are unaffected).
 *
 * @param classId  class the vector belongs to (0 - Buy, 1 - Sell)
 * @param vector   training vector components (the matrix stores up to 60)
 */
void PNNAddVector(int classId, double vector[]) {
   // Grow the training matrix by one row for the new vector and its class.
   int length = ArrayRange(pnn, 0);
   ArrayResize(pnn, length + 1);
   // First column of the row holds the class label.
   pnn[length][0] = classId;
   // Remaining columns hold the vector components.
   // Hoist the component count out of the loop condition.
   int components = ArrayRange(vector, 0);
   for (int i = 0; i < components; i++) {
      pnn[length][i + 1] = vector[i];
   }
}
/**
 * Sum of squared component differences between two equal-length vectors
 * (the squared Euclidean distance). Despite the name, no dot product is
 * involved: this value feeds the Gaussian kernel during classification.
 */
double euclideanScalarProduct(double p[], double q[]) {
   double sum = 0;
   int n = ArrayRange(p, 0);
   for (int k = 0; k < n; k++) {
      sum += MathPow(p[k] - q[k], 2);
   }
   return (sum);
}
/**
 * Classify a vector with the Parzen-window PNN decision rule.
 *
 * Every training vector contributes a Gaussian kernel value to its class's
 * discriminant fx[class]; each discriminant is then normalized by the usual
 * multivariate-Gaussian factor and by the class's training-vector count, and
 * the class with the largest value wins.
 *
 * Fixes over the original:
 *  - local variable "class" renamed (reserved word in MQL4 build 600+);
 *  - the second loop reused index "i" without declaring it, which fails
 *    under modern MQL4 block scoping;
 *  - a class with zero training vectors no longer causes a division by
 *    zero during normalization — it is simply skipped;
 *  - the dimension is an int, with an explicit "/ 2.0" so the exponent
 *    keeps the original floating-point value for odd dimensions.
 *
 * @param vector  vector to classify (same dimension as the training vectors,
 *                at most 60 components — the size of classVector below)
 * @return index of the winning class (0 - Buy, 1 - Sell), or -1 when the
 *         network holds no usable training data.
 */
int PNNClassifyVector(double vector[]) {
   int dimension = ArrayRange(vector, 0);
   double result = -99999999999999999999;
   int resultClass = -1;
   double fx[2] = {0, 0};
   double classVectorCount[2] = {0, 0};
   // Accumulate the Gaussian kernel of every training vector into its class.
   for (int i = 0; i < ArrayRange(pnn, 0); i++) {
      int classId = pnn[i][0];
      double classVector[60]; // training vectors have at most 60 components
      for (int j = 0; j < dimension; j++) {
         classVector[j] = pnn[i][j + 1];
      }
      classVectorCount[classId]++;
      fx[classId] += MathExp((-1) * euclideanScalarProduct(vector, classVector) / (2 * MathPow(SIGMA, 2)));
   }
   // Normalize each class density and keep the largest; classes without
   // training vectors are skipped to avoid dividing by zero.
   for (int k = 0; k < ArrayRange(fx, 0); k++) {
      if (classVectorCount[k] > 0) {
         fx[k] *= 1 / (MathPow(2 * 3.14159265, dimension / 2.0) * MathPow(SIGMA, dimension)) * (1 / classVectorCount[k]);
         if (fx[k] > result) {
            result = fx[k];
            resultClass = k;
         }
      }
   }
   return (resultClass);
}
/**
 * Persist the trained PNN to "pnn.dat" as a flat sequence of doubles in
 * row-major order (class label followed by the vector components).
 *
 * Fix: the FileOpen handle was never checked, so a failed open led to
 * writes through an invalid handle.
 */
void PNNSave() {
   int handle = FileOpen("pnn.dat", FILE_WRITE | FILE_BIN);
   if (handle < 0) {
      return; // could not open the file; nothing is written
   }
   int vectorSize = ArrayRange(pnn, 1);
   // ArraySize() returns the TOTAL element count (rows * vectorSize) for a
   // multidimensional MQL4 array, so this walks the whole matrix.
   for (int i = 0; i < ArraySize(pnn); i++) {
      FileWriteDouble(handle, pnn[i / vectorSize][i % vectorSize]);
   }
   FileClose(handle);
}
/**
 * Load a trained PNN from "pnn.dat", replacing the current training set.
 *
 * Fix: the original loop ran once per BYTE of the file (FileSize returns
 * bytes) instead of once per stored double (8 bytes each), reading past the
 * end of the file and indexing past the end of the pnn array. It now reads
 * exactly fileSize / 8 doubles. The FileOpen handle is also checked, so a
 * missing file leaves the current network untouched.
 */
void PNNLoad() {
   int handle = FileOpen("pnn.dat", FILE_READ | FILE_BIN);
   if (handle < 0) {
      return; // file missing or unreadable; keep the current network
   }
   int fileSize = FileSize(handle);
   // Each row is 61 doubles of 8 bytes each.
   ArrayResize(pnn, fileSize / (61 * 8));
   int vectorSize = ArrayRange(pnn, 1);
   int doubleCount = fileSize / 8; // number of stored doubles, not bytes
   for (int i = 0; i < doubleCount; i++) {
      pnn[i / vectorSize][i % vectorSize] = FileReadDouble(handle);
   }
   FileClose(handle);
}
Comments
Markdown Formatting Guide
# H1
## H2
### H3
**bold text**
*italicized text*
[title](https://www.example.com)

`code`
```
code block
```
> blockquote
- Item 1
- Item 2
1. First item
2. Second item
---