Merge branch 'master' of https://github.com/FBRDNLMS/NLMSvariants
commit 59cf6e19a5
@@ -60,7 +60,8 @@ Logs are created by default:
 + each nlms function.Predicted value, input value and error value as well as square mean and deviation
 + updated weights for each nlms calculation
 
-`CPP_NLMS -i <inputfile> -g true` prints a graphical overview to `graphResults.html`. The output is an __SVG__ embedded in html.
+`CPP_NLMS -i <inputfile> -g true` prints a graphical overview to `graphResults.html` using the `graphResults_template.html` in the same directory as the binary.
+`CPP_NLMS -i <inputfile> -g <pathToTemplate>` does the same, only the `graphResults_template.html` is handed from a diffrent path. The output is an __SVG__ embedded in html for both.
 
 
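For illustration only (the input file name `video.txt` and the template location are hypothetical, not from the repository): `CPP_NLMS -i video.txt -g true` would look for `graphResults_template.html` next to the binary, while `CPP_NLMS -i video.txt -g ./templates/graphResults_template.html` would load it from the given path; both would write the embedded SVG to `graphResults.html`.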
@@ -194,7 +194,7 @@ Variant (1/3), substract local mean.
 */
 void localMean ( mldata_t *mlData, point_t points[] ) {
 double *localWeights = (double *) malloc ( sizeof(double) * mlData->windowSize + 1);
-localWeights = mlData->weights;
+memcpy(localWeights, mlData->weights, sizeof(double) * sizeof(mlData->windowSize) );
 
 char fileName[50];
 const unsigned xErrorLength = mlData->samplesCount;
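For context, a minimal standalone sketch of the copy-into-a-local-buffer pattern the new `memcpy` line introduces; the helper name and the `windowSize + 1` element count are assumptions chosen to mirror the `malloc` above, not repository code:

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical sketch, not repository code: copy a shared weight vector into a
   freshly allocated working buffer instead of overwriting the malloc'd pointer. */
static double *copyWeights(const double *weights, unsigned windowSize)
{
    size_t count = (size_t)windowSize + 1;              /* assumed element count */
    double *local = (double *)malloc(count * sizeof(double));
    if (local != NULL)
        memcpy(local, weights, count * sizeof(double)); /* byte size derived from the element count */
    return local;
}

int main(void)
{
    double weights[6] = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6 };
    double *local = copyWeights(weights, 5);            /* windowSize = 5 -> 6 elements copied */
    if (local != NULL) {
        printf("%f\n", local[5]);                       /* prints 0.600000 */
        free(local);                                    /* mirrors the free(localWeights) added below */
    }
    return 0;
}
```

The matching cleanup is the `free(localWeights)` call this commit adds at the end of each function further down in the diff.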
@@ -227,7 +227,7 @@ void localMean ( mldata_t *mlData, point_t points[] ) {
 xError[xCount] = xActual - xPredicted; // Get error value
 xSquared = 0.0;
 for (i = 1; i < _arrayLength; i++) { // Get xSquared
-xSquared += pow(xSamples[xCount - i] - xMean, 2);
+xSquared += (xSamples[xCount - i] - xMean) * (xSamples[xCount - i] - xMean);
 }
 if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occassions
 xSquared = 1.0;
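A tiny standalone comparison of the two forms swapped in this hunk (hypothetical values, not repository code); both yield the same square, the product form simply avoids the `pow` library call:

```c
#include <math.h>
#include <stdio.h>

int main(void)
{
    double diff = 3.7;                      /* stand-in for xSamples[xCount - i] - xMean */
    double viaPow = pow(diff, 2);           /* old form: call into libm */
    double viaProduct = diff * diff;        /* new form: plain multiplication */
    printf("%f %f\n", viaPow, viaProduct);  /* prints the same value twice: 13.690000 13.690000 */
    return 0;
}
```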
@@ -235,9 +235,9 @@ void localMean ( mldata_t *mlData, point_t points[] ) {
 for ( i = 1; i < _arrayLength; i++ ) { // Update weights
 localWeights[i] = localWeights[i - 1] + mlData->learnrate * xError[xCount] // Substract localMean
 * ( (xSamples[xCount - i] - xMean) / xSquared );
-fprintf( fp9, "%lf\n", localWeights[i] );
+fprintf( fp9, "%lf;", localWeights[i] );
 }
-
+fprintf(fp9, "\n");
 fprintf(fp4, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
 
 points[xCount].xVal[1] = xCount; // Save points so graph can be build later on
@@ -253,12 +253,14 @@ void localMean ( mldata_t *mlData, point_t points[] ) {
 double deviation = 0.0;
 
 for (i = 1; i < xErrorLength; i++) { // Mean square
-deviation += pow(xError[i] - mean, 2);
+deviation += (xError[i] - mean) * (xError[i] - mean);
 }
 deviation /= xErrorLength; // Deviation
 printf("mean square err: %lf, variance: %lf\t\tlocal Mean\n", mean, deviation);
 fprintf(fp4, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean); // Write to logfile
 fclose(fp4);
 
+free(localWeights);
 }
 
 /*
@@ -272,12 +274,12 @@ substract direct predecessor
 ======================================================================================================
 */
 void directPredecessor( mldata_t *mlData, point_t points[]) {
-double *localWeights = ( double * ) malloc ( sizeof(double) * mlData->windowSize + 1 );
-localWeights = mlData->weights;
+double *localWeights = (double * ) malloc ( sizeof(double) * mlData->windowSize + 1 );
+memcpy(localWeights, mlData->weights, sizeof(double) * sizeof(mlData->windowSize));
 
 char fileName[512];
 const unsigned xErrorLength = mlData->samplesCount;
 double xError[xErrorLength];
 unsigned xCount = 0, i;
 double xActual = 0.0;
 double xPredicted = 0.0;
@@ -303,7 +305,8 @@ void directPredecessor( mldata_t *mlData, point_t points[]) {
 
 double xSquared = 0.0;
 for (i = 1; i < _arrayLength; i++) {
-xSquared += pow(xSamples[xCount - 1] - xSamples[xCount - i - 1], 2); // substract direct predecessor
+xSquared += (xSamples[xCount - 1] - xSamples[xCount - i - 1])
+* (xSamples[xCount - 1] - xSamples[xCount - i - 1]); // substract direct predecessor
 }
 if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occassions
 xSquared = 1.0;
@@ -327,12 +330,14 @@ void directPredecessor( mldata_t *mlData, point_t points[]) {
 
 
 for (i = 1; i < xErrorLength; i++) {
-deviation += pow(xError[i] - mean, 2); // Mean square
+deviation += (xError[i] - mean) * (xError[i] - mean); // Mean square
 }
 deviation /= xErrorLength; // Deviation
 printf("mean square err: %lf, variance: %lf\t\t\tdirect Predecessor\n", mean, deviation);
 fprintf(fp3, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean);
 fclose(fp3);
 
+free(localWeights);
 }
 
 /*
@@ -347,10 +352,11 @@ differential predecessor.
 */
 void differentialPredecessor ( mldata_t *mlData, point_t points[] ) {
 double *localWeights = (double *) malloc ( sizeof(double) * mlData->windowSize + 1 );
-localWeights = mlData->weights;
-const unsigned xErrorLength = mlData->samplesCount;
+memcpy(localWeights, mlData->weights, sizeof(double) * sizeof(mlData->windowSize));
+
+const unsigned xErrorLength = mlData->samplesCount;
 char fileName[512];
 double xError[xErrorLength];
 
 unsigned xCount = 0, i;
 double xPredicted = 0.0;
@@ -377,7 +383,8 @@ void differentialPredecessor ( mldata_t *mlData, point_t points[] ) {
 
 double xSquared = 0.0;
 for (i = 1; i < _arrayLength; i++) {
-xSquared += pow(xSamples[xCount - i] - xSamples[xCount - i - 1], 2); // Substract direct predecessor
+xSquared += (xSamples[xCount - i] - xSamples[xCount - i - 1])
+* (xSamples[xCount - i] - xSamples[xCount - i -1]); // Substract direct predecessor
 }
 if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occassions
 xSquared = 1.0;
@@ -404,12 +411,14 @@ void differentialPredecessor ( mldata_t *mlData, point_t points[] ) {
 
 
 for (i = 1; i < xErrorLength; i++) { // Mean square
-deviation += pow(xError[i] - mean, 2);
+deviation += (xError[i] - mean) * (xError[i] - mean);;
 }
 deviation /= xErrorLength;
 printf("mean square err: %lf, variance: %lf\t\t\tdifferential Predecessor\n", mean, deviation);
 fprintf(fp6, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean);
 fclose(fp6);
 
+free(localWeights);
 }
 
 /*
@@ -29,7 +29,7 @@ There are a bunch of options you can predefine but do not have to. The only para
 | -w | Size of M (window). | 5 |
 | -c | Choose RGB color channel, green has least noise. | green |
 | -l | Learnrate of machine learning.| 0.4 |
-| -g | include graph building. Choose for amount of input data lower than 1200. If the template is located in another folder use its path otherwise use true.| none|
+| -g | include graph building. Choose for amount of input data lower than 1200. If the template is located in another folder use its path otherwise use true. Do not use whitespace in path to folder.| none|
 | -s | Seed randomizing weights. Choose for repoducability. | time(NULL)|
 
 This code is ANSI aka C89 compatible. No POSIX, C99, C11 or GNU libs, because it had to be windows compatible . There are way easier methods like getline() for file parsing or getopt() as an args parser, because of compatibility reasons things have been kept simple.
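As a combined illustration of these switches (hypothetical file names, and assuming the binary is invoked like the `CPP_NLMS` example earlier in this diff): `CPP_NLMS -i video.txt -w 5 -l 0.4 -c green -s 1234 -g ./templates/graphResults_template.html` — note that, per the updated `-g` row, the template path must not contain whitespace.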
@@ -28,6 +28,6 @@ There are a bunch of options you can predefine but do not have to. The only para
 | -w | Size of M (window). | 5 |
 | -c | Choose RGB color channel, green has least noise. | green |
 | -l | Learnrate of machine learning. | 0.4 |
-| -g | include graph building. Choose for amount of input data lower than 1200. Choose path if template is located in another folder, else use true.| none|
+| -g | include graph building. Choose for amount of input data lower than 1200. Choose path if template is located in another folder, else use true. Do not use whitespace in path.| none|
 | -s | Seed randomizing weights. Choose for repoducability. | time(NULL)|
 