Graphing is opt-in now.

gurkenhabicht 2018-05-24 11:02:00 +02:00
parent 25251cd572
commit 8ce167ff0d
4 changed files with 268 additions and 257 deletions

View File

@ -64,13 +64,14 @@ int main( int argc, char **argv ) {
char *colorChannel = (char *) malloc(sizeof(char)* 32);
char *inputfile = (char *)malloc(sizeof(char) * 32);
unsigned *seed = NULL;
unsigned k, xclude = 0;
unsigned k, include = 0;
unsigned windowSize = 5;
unsigned samplesCount = 512;
char *stdcolor = "green", xBuffer[512];
colorChannel = stdcolor;
unsigned int uint_buffer[1], windowBuffer[1];
double learnrate = 0.4;
char *istrue = "true";
while( (argc > 1) && (argv[1][0] == '-') ) { // Parses parameters from stdin
@ -111,9 +112,14 @@ int main( int argc, char **argv ) {
++argv;
--argc;
break;
case 'x':
case 'g':
sscanf(&argv[1][3], "%s", xBuffer);
xclude = 1;
if ( strstr(xBuffer, istrue) ) {
include = 1;
} else {
printf( "Wrong Argruments: %s\n", argv[1]);
usage(argv);
}
++argv;
--argc;
break;
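Note that the strstr() test above accepts any value that merely contains "true" (for example "-g nottrue" would still enable graphing). A minimal sketch of a stricter check using an exact strcmp() match; the helper name is an assumption, not part of this commit:

```c
#include <string.h>

/* Hypothetical helper: returns 1 only for an exact "true", 0 otherwise.
 * An exact match avoids the substring behaviour of strstr(). */
static int parseGraphFlag(const char *value)
{
    return strcmp(value, "true") == 0;
}
```

In the parser this would replace the strstr() call, e.g. `include = parseGraphFlag(xBuffer);`, with the existing usage(argv) branch handling a result of 0.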
@ -169,7 +175,7 @@ int main( int argc, char **argv ) {
directPredecessor ( mlData, points);
differentialPredecessor( mlData, points );
if ( xclude == 0 ) {
if ( include == 1 ) {
mkSvgGraph(points); // Graph building
}
@ -480,8 +486,8 @@ char * fileSuffix ( int id ) {
"_localMean.txt",
"_testvalues.txt",
"_differential_predecessor.txt",
"_weights_used_local_mean",
"_weights_used_diff_pred",
"_weights_used_local_mean.txt",
"_weights_used_diff_pred.txt",
};
return suffix[id];
}

View File

@ -24,12 +24,12 @@ There are a bunch of options you can predefine but do not have to. The only para
| Parameter | Description | StdVal |
|:----------|:-----------------------------:|:-------|
| -i | The inputfile, has to be PPM | none |
| -n | Amount of input data used | 500 |
| -w | Size of M (window) | 5 |
| -i | The inputfile, has to be PPM. | none |
| -n | Amount of input data used. | 500 |
| -w | Size of M (window). | 5 |
| -c | Choose RGB color channel, green has least noise. | green |
| -l | Learnrate of machine learning | 0.4 |
| -x | Exclude graph building. Logfiles only, choose for insane amount of input data. 10Mio. Pixels tested so far.| none|
| -l | Learnrate of machine learning. | 0.4 |
| -g true | Include graph building. Choose for amounts of input data below 1200. | none |
| -s | Seed for randomizing the weights. Choose for reproducibility. | time(NULL) |
This code is plain ANSI C, with no POSIX, C99, C11, or GNU library calls, because it had to stay VS compatible. There are far easier approaches such as getline() or getopt(), I know ...
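As a quick sketch of a full invocation (the binary name `lms` is only an assumption; the build output is not named in this section), a run that also builds the SVG graph could look like:

```
./lms -i myimage.ppm -n 1000 -w 5 -c green -l 0.4 -g true
```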

View File

@ -23,116 +23,122 @@ double *xSamples; // Input values
mldata_t *mlData = NULL; // Machine learning
point_t *points = NULL; // Graphing
/* *graph building* */
/* *graph building* */
static imagePixel_t * rdPPM(char *fileName); // Read PPM file format
void mkPpmFile(char *fileName, imagePixel_t *image); // Writes PPM file
int ppmColorChannel(FILE* fp, imagePixel_t *image, // Writes colorChannel from PPM file to log file
char *colorChannel, mldata_t *mlData);
char *colorChannel, mldata_t *mlData);
void colorSamples(FILE* fp, mldata_t *mlData); // Stores color channel values in xSamples
/* *file handling* */
char * mkFileName(char* buffer,
size_t max_len, int suffixId);
char *fileSuffix(int id);
char *fileHeader(int id); // Header inside the logfiles
//void myLogger ( FILE* fp, point_t points[] );
/* *file handling* */
char * mkFileName ( char* buffer,
size_t max_len, int suffixId );
char *fileSuffix ( int id );
char *fileHeader ( int id ); // Header inside the logfiles
//void myLogger ( FILE* fp, point_t points[] );
void bufferLogger(char *buffer, point_t points[]); // Writes points to graph template
void mkSvgGraph(point_t points[]); // Parses graph template and calls bufferLogger()
void weightsLogger(double *weights, int suffix); // Writes updated weights to a file
void mkSvgGraph ( point_t points[] ); // Parses graph template and calls bufferLogger()
void weightsLogger ( double *weights, int suffix ); // Writes updated weights to a file
/* *rand seed* */
double r2(void); // Random val between 0 and 1
double rndm(void);
/* *rand seed* */
double r2 ( void ); // Random val between 0 and 1
double rndm ( void );
/* *args parser* */
void usage(char **argv); // Help text called by args parser
void usage ( char **argv ); // Help text called by args parser
/* *math* */
/* *math* */
mldata_t * init_mldata_t(unsigned windowSize, unsigned samplesCount, double learnrate);
double sum_array(double x[], int length);
void localMean(mldata_t *mlData, point_t points[]); // First,
void directPredecessor(mldata_t *mlData, point_t points[]); // Second,
void differentialPredecessor(mldata_t *mlData, point_t points[]); // Third filter implementation
void localMean ( mldata_t *mlData,point_t points[] ); // First,
void directPredecessor ( mldata_t *mlData, point_t points[] ); // Second,
void differentialPredecessor ( mldata_t *mlData, point_t points[] ); // Third filter implementation
double *popNAN(double *xError); // Returns array without NAN values, if any exist
double windowXMean(int _arraylength, int xCount); // Returns mean value of given window
int main(int argc, char **argv) {
char *colorChannel = (char *)malloc(sizeof(char) * 32);
int main( int argc, char **argv ) {
char *colorChannel = (char *) malloc(sizeof(char)* 32);
char *inputfile = (char *)malloc(sizeof(char) * 32);
unsigned *seed = NULL;
unsigned k, xclude = 0;
unsigned k, include = 0;
unsigned windowSize = 5;
unsigned samplesCount = 512;
char *stdcolor = "green", xBuffer[512];
colorChannel = stdcolor;
unsigned int uint_buffer[1], windowBuffer[1];
double learnrate = 0.4;
while ((argc > 1) && (argv[1][0] == '-')) { // Parses parameters from stdin
switch (argv[1][1]) {
case 'i':
inputfile = &argv[1][3];
++argv;
--argc;
break;
case 'w':
sscanf(&argv[1][3], "%u", windowBuffer);
windowSize = windowBuffer[0];
++argv;
--argc;
break;
case 'c':
colorChannel = &argv[1][3];
++argv;
--argc;
break;
case 's':
sscanf(&argv[1][3], "%u", uint_buffer);
seed = &uint_buffer[0];
++argv;
--argc;
break;
case 'n':
sscanf(&argv[1][3], "%u", &samplesCount);
++argv;
--argc;
break;
case 'h':
printf("Program name: %s\n", argv[0]);
usage(argv);
break;
case 'l':
sscanf(&argv[1][3], "%lf", &learnrate);
++argv;
--argc;
break;
case 'x':
sscanf(&argv[1][3], "%s", xBuffer);
xclude = 1;
++argv;
--argc;
break;
default:
printf("Wrong Arguments: %s\n", argv[1]);
usage(argv);
}
char *istrue = "true";
while( (argc > 1) && (argv[1][0] == '-') ) { // Parses parameters from stdin
switch( argv[1][1] ) {
case 'i':
inputfile = &argv[1][3];
++argv;
--argc;
break;
case 'w':
sscanf(&argv[1][3], "%u", windowBuffer);
windowSize = windowBuffer[0];
++argv;
--argc;
break;
case 'c':
colorChannel = &argv[1][3];
++argv;
--argc;
break;
case 's':
sscanf(&argv[1][3], "%u", uint_buffer);
seed = &uint_buffer[0];
++argv;
--argc;
break;
case 'n':
sscanf(&argv[1][3], "%u", &samplesCount);
++argv;
--argc;
break;
case 'h':
printf("Program name: %s\n", argv[0]);
usage(argv);
break;
case 'l':
sscanf(&argv[1][3], "%lf", &learnrate);
++argv;
--argc;
break;
case 'g':
sscanf(&argv[1][3], "%s", xBuffer);
if ( strstr(xBuffer, istrue) ) {
include = 1;
} else {
printf( "Wrong Argruments: %s\n", argv[1]);
usage(argv);
}
++argv;
--argc;
break;
default:
printf("Wrong Arguments: %s\n", argv[1]);
usage(argv);
}
++argv;
--argc;
}
init_mldata_t(windowSize, samplesCount, learnrate);
xSamples = (double *)malloc(sizeof(double) * mlData->samplesCount); // Resize input values
points = (point_t *)malloc(sizeof(point_t) * mlData->samplesCount); // Resize points
imagePixel_t *image;
init_mldata_t ( windowSize, samplesCount, learnrate );
xSamples = (double *) malloc ( sizeof(double) * mlData->samplesCount ); // Resize input values
points = (point_t *) malloc ( sizeof(point_t) * mlData->samplesCount); // Resize points
imagePixel_t *image;
image = rdPPM(inputfile); // Set Pointer on input values
printf("window Size: %d\n", mlData->windowSize);
printf("window Size: %d\n", mlData->windowSize);
char fileName[50]; // Logfiles and their names
mkFileName(fileName, sizeof(fileName), TEST_VALUES);
FILE* fp5 = fopen(fileName, "w");
@ -141,17 +147,16 @@ int main(int argc, char **argv) {
FILE* fp6 = fopen(fileName, "r");
colorSamples(fp6, mlData);
if ((seed != NULL)) {
srand(*seed); // Seed for random number generating
if ( (seed != NULL) ){
srand( *seed ); // Seed for random number generating
printf("srand is reproducable\n");
}
else {
srand((unsigned int)time(NULL));
} else {
srand( (unsigned int)time(NULL) );
printf("srand depends on time\n"); // Default seed is time(NULL)
}
printf("generated weights:\n");
for (k = 0; k < mlData->windowSize; k++) {
for (k = 0; k < mlData->windowSize; k++) {
mlData->weights[k] = rndm(); // Init random weights
printf("[%d] %lf\n", k, mlData->weights[k]);
}
@ -160,20 +165,20 @@ int main(int argc, char **argv) {
mkFileName(fileName, sizeof(fileName), PURE_WEIGHTS); // Logfile weights
FILE *fp0 = fopen(fileName, "w");
for (k = 0; k < mlData->windowSize; k++) {
fprintf(fp0, "[%d]%lf\n", k, mlData->weights[k]);
fprintf(fp0, "[%d]%lf\n", k, mlData->weights[k]);
}
fclose(fp0);
/* *math magic* */
localMean(mlData, points);
directPredecessor(mlData, points);
differentialPredecessor(mlData, points);
localMean ( mlData, points );
directPredecessor ( mlData, points);
differentialPredecessor( mlData, points );
if (xclude == 0) {
if ( include == 1 ) {
mkSvgGraph(points); // Graph building
}
}
free(image);
free(xSamples);
@ -191,56 +196,56 @@ Variant (1/3), subtract local mean.
======================================================================================================
*/
void localMean(mldata_t *mlData, point_t points[]) {
double *localWeights = (double *)malloc(sizeof(double) * mlData->windowSize + 1);
localWeights = mlData->weights; // Copy weights so they can be changed locally
void localMean ( mldata_t *mlData, point_t points[] ) {
double *localWeights = (double *) malloc ( sizeof(double) * (mlData->windowSize + 1) );
memcpy ( localWeights, mlData->weights, sizeof(double) * mlData->windowSize ); // Copy weights so they can be changed locally and freed safely
char fileName[50];
double *xError = (double *)malloc(sizeof(double) * mlData->samplesCount + 1); // Includes e(n)
double *xError = (double *) malloc ( sizeof(double) * mlData->samplesCount + 1); // Includes e(n)
memset(xError, 0.0, mlData->samplesCount); // Initialize xError-array with Zero
unsigned i, xCount = 0; // Runtime vars
unsigned i, xCount = 0; // Runtime vars
mkFileName(fileName, sizeof(fileName), LOCAL_MEAN); // Create Logfile and its filename
FILE* fp4 = fopen(fileName, "w");
fprintf(fp4, fileHeader(LOCAL_MEAN_HEADER));
mkFileName(fileName, sizeof(fileName), USED_WEIGHTS_LOCAL_MEAN);
FILE* fp4 = fopen(fileName, "w");
fprintf( fp4, fileHeader(LOCAL_MEAN_HEADER) );
mkFileName ( fileName, sizeof(fileName), USED_WEIGHTS_LOCAL_MEAN);
FILE *fp9 = fopen(fileName, "w");
double xMean = xSamples[0];
double xSquared = 0.0;
double xPredicted = 0.0;
double xActual = 0.0;
for (xCount = 1; xCount < mlData->samplesCount-1; xCount++) { // First value will not get predicted
unsigned _arrayLength = (xCount > mlData->windowSize) ? mlData->windowSize + 1 : xCount; // Ensures correct length at start
xMean = (xCount > 0) ? windowXMean(_arrayLength, xCount) : 0;
for ( xCount = 1; xCount < mlData->samplesCount-1; xCount++ ) { // First value will not get predicted
unsigned _arrayLength = ( xCount > mlData->windowSize ) ? mlData->windowSize + 1 : xCount; // Ensures correct length at start
xMean = (xCount > 0) ? windowXMean(_arrayLength, xCount) : 0;
xPredicted = 0.0;
xActual = xSamples[xCount];
for (i = 1; i < _arrayLength; i++) { // Get predicted value
xPredicted += (localWeights[i - 1] * (xSamples[xCount - i] - xMean));
for ( i = 1; i < _arrayLength; i++ ) { // Get predicted value
xPredicted += ( localWeights[i - 1] * (xSamples[xCount - i] - xMean) );
}
xPredicted += xMean;
xPredicted += xMean;
xError[xCount] = xActual - xPredicted; // Get error value
xSquared = 0.0;
for (i = 1; i < _arrayLength; i++) { // Get xSquared
xSquared += pow(xSamples[xCount - i] - xMean, 2);
}
if (xSquared == 0.0) { // Otherwise returns Pred: -1.#IND00 in some occasions
if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occasions
xSquared = 1.0;
}
for (i = 1; i < _arrayLength; i++) { // Update weights
for ( i = 1; i < _arrayLength; i++ ) { // Update weights
localWeights[i] = localWeights[i - 1] + mlData->learnrate * xError[xCount] // Subtract local mean
* ((xSamples[xCount - i] - xMean) / xSquared);
fprintf(fp9, "%lf\n", localWeights[i]);
* ( (xSamples[xCount - i] - xMean) / xSquared );
fprintf( fp9, "%lf\n", localWeights[i] );
}
fprintf(fp4, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
points[xCount].xVal[1] = xCount; // Save points so graph can be build later on
points[xCount].yVal[1] = xPredicted;
points[xCount].yVal[1] = xPredicted;
points[xCount].xVal[4] = xCount;
points[xCount].yVal[4] = xError[xCount];
@ -249,8 +254,8 @@ void localMean(mldata_t *mlData, point_t points[]) {
fclose(fp9);
double *xErrorPtr = popNAN(xError); // delete NAN values from xError[]
double xErrorLength = *xErrorPtr; // Watch popNAN()!
xErrorPtr[0] = 0.0;
// printf("Xerrorl:%lf", xErrorLength);
xErrorPtr[0] = 0.0;
// printf("Xerrorl:%lf", xErrorLength);
double mean = sum_array(xErrorPtr, xErrorLength) / xErrorLength; // Mean
double deviation = 0.0;
@ -260,7 +265,7 @@ void localMean(mldata_t *mlData, point_t points[]) {
deviation /= xErrorLength; // Deviation
printf("mean:%lf, devitation:%lf\t\tlocal Mean\n", mean, deviation);
fprintf(fp4, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean); // Write to logfile
//free(localWeights);
free(localWeights);
free(xErrorPtr);
free(xError);
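For reference, the loop above performs a normalized LMS step on the mean-removed window. Mirroring the code (M = windowSize, mu = learnrate, x_mean = windowXMean()):

    xPredicted(n) = x_mean + sum_{i=1..M} w[i-1] * ( x(n-i) - x_mean ),   xError(n) = x(n) - xPredicted(n)
    w[i] = w[i-1] + mu * xError(n) * ( x(n-i) - x_mean ) / sum_{j=1..M} ( x(n-j) - x_mean )^2

The directPredecessor() and differentialPredecessor() variants below run the same step with ( x(n-1) - x(n-i-1) ) and ( x(n-i) - x(n-i-1) ) respectively in place of ( x(n-i) - x_mean ), and with x(n-1) instead of x_mean as the prediction baseline.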
@ -279,11 +284,12 @@ subtract direct predecessor
======================================================================================================
*/
void directPredecessor(mldata_t *mlData, point_t points[]) {
double *localWeights = (double *)malloc(sizeof(double) * mlData->windowSize + 1);
void directPredecessor( mldata_t *mlData, point_t points[]) {
double *localWeights = ( double * ) malloc ( sizeof(double) * (mlData->windowSize + 1) );
memcpy ( localWeights, mlData->weights, sizeof(double) * mlData->windowSize ); // Copy weights so they can be changed locally and freed safely
char fileName[512];
double *xError = (double *)malloc(sizeof(double) * mlData->samplesCount + 1);
double *xError = (double *) malloc ( sizeof(double) * mlData->samplesCount + 1 );
memset(xError, 0.0, mlData->samplesCount);
unsigned xCount = 0, i;
double xActual = 0.0;
@ -291,20 +297,20 @@ void directPredecessor(mldata_t *mlData, point_t points[]) {
mkFileName(fileName, sizeof(fileName), DIRECT_PREDECESSOR); // Logfile and name handling
FILE *fp3 = fopen(fileName, "w");
fprintf(fp3, fileHeader(DIRECT_PREDECESSOR_HEADER));
mkFileName(fileName, sizeof(fileName), USED_WEIGHTS_DIR_PRED);
fprintf( fp3, fileHeader(DIRECT_PREDECESSOR_HEADER) );
mkFileName ( fileName, sizeof(fileName), USED_WEIGHTS_DIR_PRED);
FILE *fp9 = fopen(fileName, "w");
for (xCount = 1; xCount < mlData->samplesCount-1; xCount++) { // first value will not get predicted
unsigned _arrayLength = (xCount > mlData->windowSize) ? mlData->windowSize + 1 : xCount;
unsigned _arrayLength = ( xCount > mlData->windowSize ) ? mlData->windowSize + 1 : xCount;
xPredicted = 0.0;
xActual = xSamples[xCount];
for (i = 1; i < _arrayLength; i++) {
xPredicted += (localWeights[i - 1] * (xSamples[xCount - 1] - xSamples[xCount - i - 1]));
xPredicted += ( localWeights[i - 1] * (xSamples[xCount - 1] - xSamples[xCount - i - 1]));
}
xPredicted += xSamples[xCount - 1];
xError[xCount] = xActual - xPredicted;
@ -312,32 +318,32 @@ void directPredecessor(mldata_t *mlData, point_t points[]) {
for (i = 1; i < _arrayLength; i++) {
xSquared += pow(xSamples[xCount - 1] - xSamples[xCount - i - 1], 2); // subtract direct predecessor
}
if (xSquared == 0.0) { // Otherwise returns Pred: -1.#IND00 in some occasions
if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occasions
xSquared = 1.0;
}
for (i = 1; i < _arrayLength; i++) { // Update weights
localWeights[i] = localWeights[i - 1] + mlData->learnrate * xError[xCount]
* ((xSamples[xCount - 1] - xSamples[xCount - i - 1]) / xSquared);
fprintf(fp9, "%lf\n", localWeights[i]);
for ( i = 1; i < _arrayLength; i++ ) { // Update weights
localWeights[i] = localWeights[i-1] + mlData->learnrate * xError[xCount]
* ( (xSamples[xCount - 1] - xSamples[xCount - i - 1]) / xSquared);
fprintf( fp9, "%lf\n", localWeights[i] );
}
fprintf(fp3, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
fprintf(fp3, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
points[xCount].xVal[2] = xCount; // Fill point_t array for graph building
points[xCount].yVal[2] = xPredicted;
points[xCount].xVal[5] = xCount;
points[xCount].yVal[5] = xError[xCount];
// weightsLogger( fp, localWeights, USED_WEIGHTS );
// weightsLogger( fp, localWeights, USED_WEIGHTS );
}
fclose(fp9);
double *xErrorPtr = popNAN(xError); // delete NAN values from xError[]
double xErrorLength = *xErrorPtr; // Watch popNAN()!
xErrorPtr[0] = 0.0; // Stored length in [0] , won't be used anyway. Bit dirty
//printf("Xerrorl:%lf", xErrorLength);
xErrorPtr[0] = 0.0; // Stored length in [0] , won't be used anyway. Bit dirty
//printf("Xerrorl:%lf", xErrorLength);
double mean = sum_array(xErrorPtr, xErrorLength) / xErrorLength; // Mean
double deviation = 0.0;
double deviation = 0.0;
for (i = 1; i < xErrorLength; i++) {
@ -347,7 +353,7 @@ void directPredecessor(mldata_t *mlData, point_t points[]) {
printf("mean:%lf, devitation:%lf\t\tdirect Predecessor\n", mean, deviation);
fprintf(fp3, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean);
fclose(fp3);
//free(localWeights);
free(localWeights);
free(xErrorPtr);
free(xError);
}
@ -362,31 +368,33 @@ differential predecessor.
======================================================================================================
*/
void differentialPredecessor(mldata_t *mlData, point_t points[]) {
double *localWeights = (double *)malloc(sizeof(double) * mlData->windowSize + 1);
void differentialPredecessor ( mldata_t *mlData, point_t points[] ) {
double *localWeights = (double *) malloc ( sizeof(double) * (mlData->windowSize + 1) );
memcpy ( localWeights, mlData->weights, sizeof(double) * mlData->windowSize ); // Copy weights so they can be changed locally and freed safely
char fileName[512];
double *xError = (double *)malloc(sizeof(double) * mlData->samplesCount + 1);
double *xError = (double *) malloc ( sizeof(double) * mlData->samplesCount + 1);
memset(xError, 0.0, mlData->samplesCount);
unsigned xCount = 0, i;
double xPredicted = 0.0;
double xActual = 0.0;
mkFileName(fileName, sizeof(fileName), DIFFERENTIAL_PREDECESSOR); // File handling
FILE *fp6 = fopen(fileName, "w");
fprintf(fp6, fileHeader(DIFFERENTIAL_PREDECESSOR_HEADER));
fprintf(fp6, fileHeader(DIFFERENTIAL_PREDECESSOR_HEADER) );
mkFileName(fileName, sizeof(fileName), USED_WEIGHTS_DIFF_PRED);
mkFileName ( fileName, sizeof(fileName), USED_WEIGHTS_DIFF_PRED);
FILE *fp9 = fopen(fileName, "w");
for (xCount = 1; xCount < mlData->samplesCount-1; xCount++) { // First value will not get predicted
for (xCount = 1; xCount < mlData->samplesCount-1; xCount++) { // First value will not get predicted
unsigned _arrayLength = (xCount > mlData->windowSize) ? mlData->windowSize + 1 : xCount;
xPredicted = 0.0;
xActual = xSamples[xCount];
for (i = 1; i < _arrayLength; i++) {
xPredicted += (localWeights[i - 1] * (xSamples[xCount - i] - xSamples[xCount - i - 1]));
xPredicted += ( localWeights[i - 1] * (xSamples[xCount - i] - xSamples[xCount - i - 1]));
}
xPredicted += xSamples[xCount - 1];
xError[xCount] = xActual - xPredicted;
@ -395,18 +403,18 @@ void differentialPredecessor(mldata_t *mlData, point_t points[]) {
for (i = 1; i < _arrayLength; i++) {
xSquared += pow(xSamples[xCount - i] - xSamples[xCount - i - 1], 2); // Subtract direct predecessor
}
if (xSquared == 0.0) { // Otherwise returns Pred: -1.#IND00 in some occasions
if ( xSquared == 0.0 ) { // Otherwise returns Pred: -1.#IND00 in some occasions
xSquared = 1.0;
}
for (i = 1; i < _arrayLength; i++) {
localWeights[i] = localWeights[i - 1] + mlData->learnrate * xError[xCount]
localWeights[i] = localWeights[i-1] + mlData->learnrate * xError[xCount]
* ((xSamples[xCount - i] - xSamples[xCount - i - 1]) / xSquared);
fprintf(fp9, "%lf\n", localWeights[i]);
fprintf( fp9, "%lf\n", localWeights[i] );
}
fprintf(fp6, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
fprintf(fp6, "%d\t%f\t%f\t%f\n", xCount, xPredicted, xActual, xError[xCount]); // Write to logfile
points[xCount].xVal[3] = xCount;
points[xCount].yVal[3] = xPredicted;
points[xCount].xVal[6] = xCount;
@ -416,13 +424,13 @@ void differentialPredecessor(mldata_t *mlData, point_t points[]) {
fclose(fp9);
double *xErrorPtr = popNAN(xError); // delete NAN values from xError[]
double xErrorLength = *xErrorPtr; // Watch popNAN()!
xErrorPtr[0] = 0.0;
// printf("Xerrorl:%lf", xErrorLength);
xErrorPtr[0] = 0.0;
// printf("Xerrorl:%lf", xErrorLength);
double mean = sum_array(xErrorPtr, xErrorLength) / xErrorLength;
double deviation = 0.0;
for (i = 1; i < xErrorLength; i++) { // Mean square
deviation += pow(xError[i] - mean, 2);
}
@ -430,12 +438,12 @@ void differentialPredecessor(mldata_t *mlData, point_t points[]) {
printf("mean:%lf, devitation:%lf\t\tdifferential Predecessor\n", mean, deviation);
fprintf(fp6, "\nQuadratische Varianz(x_error): %f\nMittelwert:(x_error): %f\n\n", deviation, mean);
fclose(fp6);
//free(localWeights);
free(localWeights);
free(xErrorPtr);
free(xError);
// weightsLogger( localWeights, USED_WEIGHTS );
// weightsLogger( localWeights, USED_WEIGHTS );
}
/*
@ -453,7 +461,7 @@ char *mkFileName(char* buffer, size_t max_len, int suffixId) {
const char * format_str = "%Y-%m-%d_%H_%M_%S"; // Date formatting
size_t date_len;
const char * suffix = fileSuffix(suffixId);
time_t now = time(NULL);
time_t now = time(NULL);
strftime(buffer, max_len, format_str, localtime(&now)); // Get Date
date_len = strlen(buffer);
@ -470,16 +478,16 @@ Contains and returns every suffix for all existing filenames
======================================================================================================
*/
char * fileSuffix(int id) {
char * suffix[] = { "_weights_pure.txt",
"_weights_used_dir_pred_.txt",
"_direct_predecessor.txt",
"_ergebnisse.txt",
"_localMean.txt",
"_testvalues.txt",
"_differential_predecessor.txt",
"_weights_used_local_mean",
"_weights_used_diff_pred",
char * fileSuffix ( int id ) {
char * suffix[] = { "_weights_pure.txt",
"_weights_used_dir_pred_.txt",
"_direct_predecessor.txt",
"_ergebnisse.txt",
"_localMean.txt",
"_testvalues.txt",
"_differential_predecessor.txt",
"_weights_used_local_mean.txt",
"_weights_used_diff_pred.txt",
};
return suffix[id];
}
@ -489,14 +497,14 @@ char * fileSuffix(int id) {
fileHeader
Contains and returns header from logfiles
Contains and returns header from logfiles
======================================================================================================
*/
char * fileHeader(int id) {
char * header[] = { "\n=========================== Local Mean ===========================\nNo.\txPredicted\txActual\t\txError\n",
"\n=========================== Direct Predecessor ===========================\nNo.\txPredicted\txActual\t\txError\n",
"\n=========================== Differential Predecessor ===========================\nNo.\txPredicted\txActual\t\txError\n"
char * fileHeader ( int id ) {
char * header[] = { "\n=========================== Local Mean ===========================\nNo.\txPredicted\txActual\t\txError\n",
"\n=========================== Direct Predecessor ===========================\nNo.\txPredicted\txActual\t\txError\n",
"\n=========================== Differential Predecessor ===========================\nNo.\txPredicted\txActual\t\txError\n"
};
return header[id];
}
@ -510,7 +518,7 @@ Logs used weights to logfile
======================================================================================================
*/
void weightsLogger(double *weights, int val) {
void weightsLogger (double *weights, int val ) {
char fileName[512];
unsigned i;
mkFileName(fileName, sizeof(fileName), val);
@ -526,21 +534,21 @@ void weightsLogger(double *weights, int val) {
bufferLogger
formats output of mkSvgGraph -- Please open graphResults.html to see the output--
[0] = xActual,
[1] = xPredicted from localMean,
[2] = xPredicted from directPredecessor,
[3] = xPredicted from differentialpredecessor,
[4] = xError from localMean,
[5] = xError from directPredecessor,
[6] = xError from differentialPredecessor
formats output of mkSvgGraph -- Please open graphResults.html to see the output--
[0] = xActual,
[1] = xPredicted from localMean,
[2] = xPredicted from directPredecessor,
[3] = xPredicted from differentialpredecessor,
[4] = xError from localMean,
[5] = xError from directPredecessor,
[6] = xError from differentialPredecessor
======================================================================================================
*/
void bufferLogger(char *buffer, point_t points[]) {
unsigned i;
char _buffer[512] = ""; // TODO: resize buffer and _buffer so a greater sample count can be chosen
// char *_buffer = (char *) malloc ( sizeof(char) * 512 + 1);
// char *_buffer = (char *) malloc ( sizeof(char) * 512 + 1);
for (i = 1; i < mlData->samplesCount - 1; i++) { // xActual
sprintf(_buffer, "L %f %f\n", points[i].xVal[0], points[i].yVal[0]);
strcat(buffer, _buffer);
@ -589,7 +597,7 @@ double sum_array(double x[], int xlength) {
popNan
returns new array without NAN values
returns new array without NAN values
======================================================================================================
*/
@ -599,21 +607,21 @@ double *popNAN(double *xError) {
double *tmp = NULL;
double *more_tmp = NULL;
for (i = 0; i < mlData->samplesCount - 1; i++) {
counter++;
more_tmp = (double *)realloc(tmp, counter*(sizeof(double)));
if (!isnan(xError[i])) {
tmp = more_tmp;
tmp[counter - 1] = xError[i];
//printf("xERROR:%lf\n", tmp[counter - 1]);
tmpLength++;
}
for ( i = 0; i < mlData->samplesCount - 1; i++ ) {
counter ++;
more_tmp = (double *) realloc ( tmp, counter*(sizeof(double) ));
if ( !isnan(xError[i]) ) {
tmp = more_tmp;
tmp[counter - 1] = xError[i];
//printf("xERROR:%lf\n", tmp[counter - 1]);
tmpLength++;
}
}
counter += 1;
more_tmp = (double *)realloc(tmp, counter * sizeof(double));
more_tmp = (double *) realloc ( tmp, counter * sizeof(double) );
tmp = more_tmp;
*tmp = tmpLength; // Length of array is stored inside tmp[0]. tmp[0] is never used anyways
return tmp;
}
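Because popNAN() returns the element count in slot [0] of the realloc'd buffer, every caller follows the same small convention; a sketch of it, using the names the three filters above already use:

```c
double *xErrorPtr   = popNAN(xError);   /* copy of xError with the NAN entries removed  */
double xErrorLength = *xErrorPtr;       /* element count is stored in slot [0]          */
xErrorPtr[0] = 0.0;                     /* slot [0] is not a sample, so it is cleared   */
double mean = sum_array(xErrorPtr, xErrorLength) / xErrorLength;
free(xErrorPtr);                        /* the caller owns the buffer popNAN() returned */
```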
@ -668,14 +676,14 @@ void mkSvgGraph(point_t points[]) {
fseek(input, 0, SEEK_END);
long fpLength = ftell(input);
fseek(input, 0, SEEK_SET);
char buffer[131072] = ""; // Bit dirty
// char *buffer = (char *) malloc ( sizeof(char) * ( ( 3 * mlData->samplesCount ) + fpLength + 1 ) );
// char *buffer = (char *) malloc ( sizeof(char) * ( ( 3 * mlData->samplesCount ) + fpLength + 1 ) );
memset(buffer, '\0', sizeof(buffer));
while (!feof(input)) { // parses file until "firstGraph" has been found
fgets(line, 512, input);
fgets(line, 512, input);
strncat(buffer, line, strlen(line));
if (strstr(line, firstGraph) != NULL) { // Compares line <-> "firstGraph"
bufferLogger(buffer, points); // write points
@ -723,30 +731,30 @@ static imagePixel_t *rdPPM(char *fileName) {
c = getc(fp);
}
ungetc(c, fp);
if (fscanf(fp, "%d %d", &image->x, &image->y) != 2) {
if ( fscanf(fp, "%d %d", &image->x, &image->y) != 2 ) {
fprintf(stderr, "Invalid image size in %s\n", fileName);
exit(EXIT_FAILURE);
}
if (fscanf(fp, "%d", &rgbColor) != 1) {
if ( fscanf(fp, "%d", &rgbColor) != 1 ) {
fprintf(stderr, "Invalid rgb component in %s\n", fileName);
}
if (rgbColor != RGB_COLOR) {
if ( rgbColor != RGB_COLOR ) {
fprintf(stderr, "Invalid image color range in %s\n", fileName);
exit(EXIT_FAILURE);
}
while (fgetc(fp) != '\n');
while ( fgetc(fp) != '\n' );
image->data = (colorChannel_t *)malloc(image->x * image->y * sizeof(imagePixel_t));
if (!image->data) {
fprintf(stderr, "malloc() on image->data failed");
exit(EXIT_FAILURE);
}
if ((image->x * image->y) < mlData->samplesCount) {
printf("Changing \"-n\" to %d, image max data size\n", (image->x * image->y));
tmp = (double *)realloc(xSamples, sizeof(double) * (image->x * image->y));
if ( (image->x * image->y) < mlData->samplesCount) {
printf("Changing \"-n\" to %d, image max data size\n", ( image->x * image->y ) );
tmp = (double *) realloc ( xSamples, sizeof(double) * (image->x * image->y) );
xSamples = tmp;
mlData->samplesCount = (image->x * image->y) / sizeof(double);
mlData->samplesCount = image->x * image->y; // Clamp the sample count to the number of pixels
}
if (fread(image->data, 3 * image->x, image->y, fp) != image->y) {
if ( fread( image->data, 3 * image->x, image->y, fp) != image->y) {
fprintf(stderr, "Loading image failed");
exit(EXIT_FAILURE);
}
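For orientation, rdPPM() reads a binary PPM (type "P6"): width and height, then the maximum color value (RGB_COLOR, presumably 255 -- its definition is not part of this diff), then the raw RGB bytes. A minimal accepted input would look like:

```
P6
640 480
255
<640 * 480 * 3 bytes of binary RGB data>
```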
@ -790,26 +798,23 @@ int ppmColorChannel(FILE* fp, imagePixel_t *image, char *colorChannel, mldata_t
unsigned i = 0;
printf("colorChannel : %s\n", colorChannel);
if (image) { // RGB channel can be set through args from cli
if (strcmp(colorChannel, "green") == 0) {
for (i = 0; i < mlData->samplesCount - 1; i++) {
fprintf(fp, "%d\n", image->data[i].green);
if ( image ) { // RGB channel can be set through args from cli
if ( strcmp(colorChannel, "green") == 0 ){
for ( i = 0; i < mlData->samplesCount - 1; i++ ) {
fprintf ( fp, "%d\n", image->data[i].green );
}
}
else if (strcmp(colorChannel, "red") == 0) {
for (i = 0; i < mlData->samplesCount - 1; i++) {
fprintf(fp, "%d\n", image->data[i].red);
} else if ( strcmp(colorChannel, "red") == 0 ){
for ( i = 0; i < mlData->samplesCount - 1; i++ ) {
fprintf ( fp, "%d\n", image->data[i].red );
}
} else if ( strcmp(colorChannel, "blue") == 0 ) {
for ( i = 0; i < mlData->samplesCount - 1; i++ ) {
fprintf ( fp, "%d\n", image->data[i].blue );
}
}
else if (strcmp(colorChannel, "blue") == 0) {
for (i = 0; i < mlData->samplesCount - 1; i++) {
fprintf(fp, "%d\n", image->data[i].blue);
}
}
else {
} else {
printf("Colorchannels are red, green and blue. Pick one of them!");
exit(EXIT_FAILURE);
exit( EXIT_FAILURE );
}
}
fclose(fp);
@ -826,12 +831,12 @@ creating the SVG graph
======================================================================================================
*/
void colorSamples(FILE* fp, mldata_t *mlData) {
void colorSamples ( FILE* fp, mldata_t *mlData ) {
int i = 0;
char *buffer = (char *)malloc(sizeof(char) * mlData->samplesCount + 1);
char *buffer = (char *) malloc(sizeof(char) * mlData->samplesCount + 1);
while (!feof(fp)) {
if (fgets(buffer, mlData->samplesCount, fp) != NULL) {
if (fgets(buffer, mlData->samplesCount, fp) != NULL) {
sscanf(buffer, "%lf", &xSamples[i]);
//printf("%lf\n", xSamples[i] );
points[i].yVal[0] = xSamples[i]; // Fills points so actual input values can be seen as a graph
@ -847,7 +852,7 @@ void colorSamples(FILE* fp, mldata_t *mlData) {
windowXMean
returns mean value of given input
returns mean value of given input
======================================================================================================
*/
@ -856,7 +861,7 @@ double windowXMean(int _arraylength, int xCount) {
double *ptr;
for (ptr = &xSamples[xCount - _arraylength]; ptr != &xSamples[xCount]; ptr++) { // Set ptr to beginning of window
sum += *ptr;
sum += *ptr;
}
return sum / (double)_arraylength;
}
@ -864,43 +869,43 @@ double windowXMean(int _arraylength, int xCount) {
/*
======================================================================================================
usage
used in conjunction with the args parser. Returns help section of "-h"
usage
used in conjunction with the args parser. Returns help section of "-h"
======================================================================================================
*/
void usage(char **argv) {
void usage ( char **argv ) {
printf("Usage: %s [POSIX style options] -i file ...\n", &argv[0][0]);
printf("POSIX options:\n");
printf("\t-h\t\t\tDisplay this information.\n");
printf("\t-i <filename>\t\tName of inputfile. Must be PPM image.\n");
printf("\t-n <digit>\t\tAmount of input data used.\n");
printf("\t-n <digit>\t\tAmount of input data used.\n");
printf("\t-c <color>\t\tUse this color channel from inputfile.\n");
printf("\t-w <digit>\t\tCount of used weights (windowSize).\n");
printf("\t-l <digit>\t\tLearnrate, 0 < learnrate < 1.\n");
printf("\t-l <digit>\t\tLearnrate, 0 < learnrate < 1.\n");
printf("\t-x true\t\t\tLogfiles only, no graph building.\n\t\t\t\tChoose for intense amount of input data.\n");
printf("\t-s <digit>\t\tDigit for random seed generator.\n\t\t\t\tSame Digits produce same random values. Default is srand by time.\n");
printf("\t-s <digit>\t\tDigit for random seed generator.\n\t\t\t\tSame Digits produce same random values. Default is srand by time.\n");
printf("\n\n");
printf("%s compares prediction methods of least mean square filters.\nBy default it reads ppm file format and return logfiles as well\nas an svg graphs as an output of said least mean square methods.\n\nExample:\n\t%s -i myimage.ppm -w 3 -c green -s 5 -x true\n", &argv[0][0], &argv[0][0]);
printf("%s compares prediction methods of least mean square filters.\nBy default it reads ppm file format and return logfiles as well\nas an svg graphs as an output of said least mean square methods.\n\nExample:\n\t%s -i myimage.ppm -w 3 -c green -s 5 -x true\n", &argv[0][0], &argv[0][0]);
exit(8);
}
/*
======================================================================================================
init_mldata_t
Contains machine learning data
init_mldata_t
Contains machine learning data
======================================================================================================
*/
mldata_t * init_mldata_t(unsigned windowSize, unsigned samplesCount, double learnrate) {
mlData = (mldata_t *)malloc(sizeof(mldata_t));
mlData = (mldata_t *) malloc( sizeof(mldata_t) );
mlData->windowSize = windowSize;
mlData->samplesCount = samplesCount;
mlData->learnrate = learnrate;
mlData->weights = (double *)malloc(sizeof(double) * windowSize + 1);
mlData->weights = (double *) malloc ( sizeof(double) * (windowSize + 1) );
return mlData;
}
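init_mldata_t() allocates the global mlData and its weights array, and no matching release appears in this diff; if one is wanted, a counterpart could look like the following sketch (name and placement are assumptions, not part of the commit):

```c
/* Hypothetical cleanup for init_mldata_t(): frees the weights array, then the struct. */
void free_mldata_t(mldata_t *data)
{
    if (data != NULL) {
        free(data->weights);
        free(data);
    }
}
```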

View File

@ -23,11 +23,11 @@ There are a bunch of options you can predefine but do not have to. The only para
| Parameter | Description | StdVal |
|:----------|:-----------------------------:|:-------|
| -i | The inputfile, has to be PPM | none |
| -n | Amount of input data used | 500 |
| -w | Size of M (window) | 5 |
| -i | The inputfile, has to be PPM. | none |
| -n | Amount of input data used. | 500 |
| -w | Size of M (window). | 5 |
| -c | Choose RGB color channel, green has least noise. | green |
| -l | Learnrate of machine learning | 0.4 |
| -x | Exclude graph building. Logfiles only, choose for insane amount of input data. 10Mio. Pixels tested so far.| none|
| -l | Learnrate of machine learning. | 0.4 |
| -g true | Include graph building. Choose for amounts of input data below 1200. | none |
| -s | Seed for randomizing the weights. Choose for reproducibility. | time(NULL) |