pnn_test
//+------------------------------------------------------------------+
//|                                                         pnn1.mq4 |
//|                      Copyright © 2008, MetaQuotes Software Corp. |
//|                                        http://www.metaquotes.net |
//+------------------------------------------------------------------+
#property copyright "Copyright © 2008, MetaQuotes Software Corp."
#property link "http://www.metaquotes.net"
#define NR_OF_ATTR 2
#define NR_OF_ATTR1 3 //NR_OF_ATTR+1
double vector[NR_OF_ATTR];
//+------------------------------------------------------------------+
//| script program start function |
//+------------------------------------------------------------------+
int start()
{
//----
   PNNInit();
   /*
   //Train
   vector[0]=0;
   vector[1]=0;
   PNNAddVector(0, vector);
   vector[0]=0;
   vector[1]=1;
   PNNAddVector(1, vector);
   vector[0]=1;
   vector[1]=0;
   PNNAddVector(1, vector);
   vector[0]=1;
   vector[1]=1;
   PNNAddVector(0, vector);
   PNNSave("EURUSD",1,1);
   Comment("Train...");
   */
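   // Workflow note: the training block above is commented out. Uncomment it and run the
   // script once so PNNSave("EURUSD",1,1) writes EURUSD_1_1_pnn.dat, then comment it out
   // again and rerun; the test block below then loads that file and classifies the four
   // XOR input vectors.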
   //Test
   PNNLoad("EURUSD",1,1);
   int class=-1;
   string s1="";
   vector[0]=0;
   vector[1]=0;
   class = PNNClassifyVector(vector);
   s1=s1+vector[0]+" xor "+vector[1]+" = "+class+"\n";
   vector[0]=0;
   vector[1]=1;
   class = PNNClassifyVector(vector);
   s1=s1+vector[0]+" xor "+vector[1]+" = "+class+"\n";
   vector[0]=1;
   vector[1]=0;
   class = PNNClassifyVector(vector);
   s1=s1+vector[0]+" xor "+vector[1]+" = "+class+"\n";
   vector[0]=1;
   vector[1]=1;
   class = PNNClassifyVector(vector);
   s1=s1+vector[0]+" xor "+vector[1]+" = "+class+"\n";
   Comment(s1);
//----
   return(0);
}
//+------------------------------------------------------------------+
//+------------------------------------------------------------------+
//|              A probabilistic neural network (PNN) implementation |
//|                                                                  |
//|                                                   PNN_mod_v1.mq4 |
//|                                             Paco Hernández Gómez |
//|                                    http://www.hernandezgomez.com |
//+------------------------------------------------------------------+
#define pi 3.14159265358979323846
double SIGMA = 0.1;
/**
 * The probabilistic neural network data is stored in an array that holds the training
 * vectors together with their assigned classes.
 *
 * To optimize speed, the possible classes are numbered from 0 to n and stored in the
 * first position of each training vector.
 *
 * pnn[x][0]    = Class the training vector is assigned to. There are two possible
 *                classes (0 and 1; in the original library they stand for Buy and Sell,
 *                here they are the two XOR outputs).
 * pnn[x][1..n] = Components of the training vector assigned to class pnn[x][0].
 *
 * In this test the training vectors have NR_OF_ATTR = 2 components.
 */
double pnn[0][NR_OF_ATTR1];
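/*
 * Illustrative layout (assuming the XOR training calls from the test script above have
 * been executed): after the four PNNAddVector() calls the array holds
 *
 *    pnn[0] = { 0, 0, 0 }   // class 0, vector (0,0)
 *    pnn[1] = { 1, 0, 1 }   // class 1, vector (0,1)
 *    pnn[2] = { 1, 1, 0 }   // class 1, vector (1,0)
 *    pnn[3] = { 0, 1, 1 }   // class 0, vector (1,1)
 */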
/**
 * Initialize the PNN with 0 training vectors.
 */
void PNNInit() {
   ArrayResize(pnn, 0);
}
/**
 * Add a training vector to the set of training vectors and assign it to a class.
 */
void PNNAddVector(int class, double vector[]) {
   // Create a new row in the array to store the new training vector and its associated class.
   int length = ArrayRange(pnn, 0);
   ArrayResize(pnn, length + 1);
   // Store the class of the new training vector
   pnn[length][0] = class;
   // Store the components of the new training vector
   for (int i = 0; i < ArrayRange(vector, 0); i++) {
      pnn[length][i + 1] = vector[i];
   }
}
/**
 * Calculate the squared Euclidean distance between two vectors, the distance measure
 * needed in order to classify a vector.
 */
double euclideanScalarProduct(double p[], double q[]) {
   double euclideanScalarProduct = 0;
   int length = ArrayRange(p, 0);
   for (int i = 0; i < length; i++) {
      euclideanScalarProduct += MathPow(p[i] - q[i], 2);
   }
   return (euclideanScalarProduct);
}
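// Worked example: for p = (0,0) and q = (1,1) the function returns
// (0-1)^2 + (0-1)^2 = 2, i.e. the squared distance between p and q.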
/**
 * Classify a vector into one of the classes.
 */
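/*
 * Sketch of the Parzen-window density estimate the routine below is based on
 * (standard PNN form, with d = NR_OF_ATTR components and n_c training vectors in class c):
 *
 *    f_c(x) = 1 / ( (2*pi)^(d/2) * SIGMA^d * n_c ) * sum_j exp( -||x - x_j||^2 / (2*SIGMA^2) )
 *
 * where the sum runs over the training vectors x_j stored for class c. The input vector
 * is assigned to the class with the largest f_c(x).
 */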
int PNNClassifyVector(double vector[]) {
   double length = ArrayRange(vector, 0);
   double result = -99999999999999999999;
   int resultClass = -1;
   double fx[2] = {0, 0};
   double classVectorCount[2] = {0, 0};
   // Accumulate the Gaussian kernel contribution of every training vector into its class sum.
   for (int i = 0; i < ArrayRange(pnn, 0); i++)
   {
      int class = pnn[i][0];
      double classVector[NR_OF_ATTR];
      for (int j = 0; j < length; j++)
      {
         classVector[j] = pnn[i][j + 1];
      }
      classVectorCount[class]++;
      double x1 = 2 * MathPow(SIGMA, 2);
      if (x1 == 0)
         x1 = 0.000000001;
      fx[class] += MathExp((-1) * euclideanScalarProduct(vector, classVector) / x1);
   }
   // Divide each class sum by (2*pi)^(d/2) * SIGMA^d * n_c and pick the class with the
   // largest resulting density estimate.
   for (i = 0; i < ArrayRange(fx, 0); i++)
   {
      double x3 = classVectorCount[i];
      if (x3 == 0)
         x3 = 0.000000001;
      double x2 = MathPow(2 * pi, length / 2) * MathPow(SIGMA, length) * x3;
      if (x2 == 0)
         x2 = 0.000000001;
      fx[i] *= 1 / x2;
      if (fx[i] > result) {
         result = fx[i];
         resultClass = i;
      }
   }
   return (resultClass);
}
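/*
 * Worked example (assuming the XOR training set above, SIGMA = 0.1 and d = 2): for the
 * input (0,0) the class-0 kernel sum is exp(-0/0.02) + exp(-2/0.02), roughly 1, while the
 * class-1 sum is exp(-1/0.02) + exp(-1/0.02), practically 0; after dividing both by
 * (2*pi) * 0.01 * 2 the routine returns class 0.
 */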
/**
* Store the trained PNN in a file
*/
void PNNSave(string symb, int per, int nr) {
   int handle;
   handle = FileOpen(symb + "_" + per + "_" + nr + "_" + "pnn.dat", FILE_WRITE | FILE_BIN);
   // ArraySize() returns the total number of elements (rows * NR_OF_ATTR1), so the loop
   // writes the whole pnn array row by row, one double at a time.
   int vectorSize = ArrayRange(pnn, 1);
   for (int i = 0; i < ArraySize(pnn); i++) {
      FileWriteDouble(handle, pnn[i / vectorSize][i % vectorSize]);
   }
   FileClose(handle);
}
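// Illustrative file layout (assuming the XOR training data from the test script):
// PNNSave("EURUSD",1,1) creates "EURUSD_1_1_pnn.dat" holding 4 rows of NR_OF_ATTR1 = 3
// doubles each, i.e. 12 values (96 bytes) written as (class, component 1, component 2).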
/**
* Load a trained PNN from a file
*/
void PNNLoad(string symb, int per, int nr) {
   int handle;
   handle = FileOpen(symb + "_" + per + "_" + nr + "_" + "pnn.dat", FILE_READ | FILE_BIN);
   // FileSize() returns the size in bytes; every stored value is an 8-byte double, so the
   // file holds fileSize / 8 values arranged in rows of NR_OF_ATTR1 doubles.
   int fileSize = FileSize(handle);
   ArrayResize(pnn, fileSize / (NR_OF_ATTR1 * 8));
   int vectorSize = ArrayRange(pnn, 1);
   for (int i = 0; i < fileSize / 8; i++) {
      pnn[i / vectorSize][i % vectorSize] = FileReadDouble(handle);
   }
   FileClose(handle);
}