Hello there.
I'm heaving trouble with a C Program that I'm writing to simulate a Multi-Layer Neural Network. It compiles without any issues, but when I run it, it crashes instantanously. I would be very thankful if anyone could give me a hint. The code is below:
/* Multi layer network to classification of data */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <time.h>
#define MAX_SIZE 1000
#define LRATE 1e-5
#define ERR_LIMIT 1e-3
#define MAX_TIME 55
#define INPUTS 2
#define NEURONSLAYER1 4
#define NEURONSLAYER2 3
#define NEURONSLAYER3 1
#define sqr(x) ((x) * (x))
float max1,max2;
void mapping1(float in[][INPUTS+1], int n) {
int i, j;
if(in[0][1]<0) max1 = (in[0][1])*(-1.0);
else max1=in[0][1];
printf("%f\n", max1);
for(i=1;i<n-1;i++)
{
if(in[i][1]>max1 && in[i][1] < 0)max1 = in[i][1]*(-1.0);
if(in[i][1]>max1 && in[i][1] >= 0)max1 = in[i][1];
}
printf("%f\n", max1);
for(j=0;j<n-1;j++)
{
in[j][1] = in[j][1]/max1;
}
return;
}
void mapping2(float in[][INPUTS+1], int n) {
int i, j;
if(in[0][2]<0) max2 = in[0][2]*(-1.0);
else max2=in[0][2];
printf("%f\n", max2);
for(i=1;i<n-1;i++)
{
if(in[i][2]>max2 && in[i][2] < 0)max2 = in[i][2]*(-1.0);
if(in[i][2]>max2 && in[i][2] >= 0)max2 = in[i][2];
}
printf("%f\n", max2);
for(j=0;j<n-1;j++)
{
in[j][2] = in[j][2]/max2;
}
return;
}
int main (){
//Network parameters
float Input[MAX_SIZE][INPUTS +1];
float HLayer1In[NEURONSLAYER1 +1];
float HLayer1Out[NEURONSLAYER1 +1];
float HLayer2In[NEURONSLAYER2 +1];
float HLayer2Out[NEURONSLAYER2 +1];
float OLayerIn[NEURONSLAYER3 +1];
float OLayerOut[NEURONSLAYER3 +1];
float WeightsH1In[NEURONSLAYER1 +1][INPUTS +1];
float WeightsH2H1[NEURONSLAYER2 +1][NEURONSLAYER1 +1];
float WeightsOH2[NEURONSLAYER3 +1][NEURONSLAYER2 +1];
float BiasO;
//Training Parameters
float lin_error = 0, error_sum = 0, avg_error = 0;
float DevOutH1[NEURONSLAYER1 +1];
float DevOutH2[NEURONSLAYER2 +1];
float DevOutO[NEURONSLAYER3 +1];
int ExpectedOut[MAX_SIZE];
float deltaOut = 0;
float deltaLayer2[NEURONSLAYER2 +1];
float deltaLayer1[NEURONSLAYER1 +1];
float sumLayer1[NEURONSLAYER1 +1];
int i; // Index for Inputs
int j; // Index for Hidden 1
int k; // Index for Hidden 2
int m; // Index for Output
int n,l; // Index for test values
int flag = 0; // Indicates when the training is over
int read; // Loop variable
float c, d; //Store the maximum values for normalization
time_t start = time(NULL); // Timer variable
srand((unsigned)time(NULL));
// Initialize weights
for(i=1;i<INPUTS+1;i++)
{
for(j=0;j<NEURONSLAYER1+1;j++)
{
WeightsH1In[j][i] = (rand()%10)*pow(10,-(rand()%5));
}
}
for(j=1;j<NEURONSLAYER1+1;j++)
{
for(k=0;k<NEURONSLAYER2+1;k++)
{
WeightsH2H1[k][j] = (rand()%10)*pow(10,-(rand()%5));
}
}
for(k=1;k<NEURONSLAYER2+1;k++)
{
for(m=0;k<NEURONSLAYER3+1;m++)
{
WeightsOH2[m][k] = (rand()%10)*pow(10,-(rand()%5));
}
}
BiasO = (rand()%10)*pow(10,-(rand()%5));
n = 0;
l = 0;
j = 0;
k = 0;
m = 0;
i = 0;
do{
if(flag == 1)
{
read = scanf("%f, %f", &Input[n][1], &Input[n][2]);
if(read == EOF) return 0;
//Calculate the Outputs of Neurons in the first Layer and also their derivatives
Input[n][1] = Input[n][1]/c;
Input[n][2] = Input[n][2]/d;
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer1In[j] = WeightsH1In[0][j];
for(i=1;i<INPUTS+1;i++)
{
HLayer1In[j] += Input[l][i]*WeightsH1In[j][i];
}
HLayer1Out[j] = tanh(HLayer1In[j]);
}
//Calculate the Outputs of Neurons in the second Layer and also their derivatives
for(k=1;k<NEURONSLAYER2+k;j++)
{
HLayer2In[k] = WeightsH2H1[0][k];
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer2In[k] += HLayer1Out[j]*WeightsH2H1[k][j];
}
HLayer2Out[k] = tanh(HLayer2In[k]);
}
//Calculate the Outputs of Neurons in the output Layer and also their derivatives
for(m=1;m<NEURONSLAYER3+1;m++)
{
OLayerIn[m] = BiasO;
for(k=1;k<NEURONSLAYER2+1;k++)
{
OLayerIn[m] += HLayer2Out[k]*WeightsOH2[m][k];
}
OLayerOut[m] = tanh(OLayerIn[m]);
}
n++;
if(OLayerOut[m-1] > 0) printf("+1\n");
if(OLayerOut[m-1] <= 0) printf("-1\n");
}
//Training Routine
else
{
//Read Training
read = scanf("%f,%f,%d", &Input[n][1], &Input[n][2], &ExpectedOut[n]);
n++;
//Check condition for initialization of training
if(Input[n-1][1] == 0 && Input[n-1][2] == 0 && ExpectedOut[n-1] == 0)
{
mapping1(Input,n);
c = max1;
mapping2(Input,n);
d = max2;
//Begin Training
do{
//If the program takes too long, break
if((time(NULL) - start) > MAX_TIME)
{
flag = 1;
break;
}
//If the set is over but the training isn't, restart
if(l == (n-1))
{
l=0;
error_sum = 0;
}
//Initiates over the training set
for(l=0;l<n-1;l++)
{
for(j=1;j<NEURONSLAYER1+1;j++)
{
sumLayer1[j] = 0;
}
//Calculate the Outputs of Neurons in the first Layer and also their derivatives
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer1In[j] = WeightsH1In[0][j];
for(i=1;i<INPUTS+1;i++)
{
HLayer1In[j] += Input[l][i]*WeightsH1In[j][i];
}
HLayer1Out[j] = tanh(HLayer1In[j]);
DevOutH1[j] = 1.0/(sqr(cosh(HLayer1In[j])));
}
//Calculate the Outputs of Neurons in the second Layer and also their derivatives
for(k=1;k<NEURONSLAYER2+k;j++)
{
HLayer2In[k] = WeightsH2H1[0][k];
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer2In[k] += HLayer1Out[j]*WeightsH2H1[k][j];
}
HLayer2Out[k] = tanh(HLayer2In[k]);
DevOutH2[k] = 1.0/(sqr(cosh(HLayer2In[k])));
}
//Calculate the Outputs of Neurons in the output Layer and also their derivatives
for(m=1;m<NEURONSLAYER3+1;m++)
{
OLayerIn[m] = BiasO;
for(k=1;k<NEURONSLAYER2+1;k++)
{
OLayerIn[m] += HLayer2Out[k]*WeightsOH2[m][k];
}
OLayerOut[m] = tanh(OLayerIn[m]);
DevOutO[m] = 1.0/(sqr(cosh(OLayerIn[m])));
}
//Calculate error
lin_error = ExpectedOut[n] - OLayerOut[m];
error_sum += sqr(lin_error);
deltaOut = lin_error*DevOutO[m];
//Propagate error
for(m=1;m<NEURONSLAYER3 +1;m++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
deltaLayer2[k] = WeightsOH2[m][k]*deltaOut*DevOutH2[k];
}
}
//-----------------------------------------------------------
for(j=1;j<NEURONSLAYER1 +1;j++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
sumLayer1[j]+=WeightsH2H1[k][j]*deltaLayer2[k];
}
}
for(j=1;j<NEURONSLAYER1 +j;k++)
{
deltaLayer1[j]=sumLayer1[j]*DevOutH1[j];
}
//Update weights
//---Output Layer
BiasO += 2.0*LRATE*deltaOut;
for(m=1;m<NEURONSLAYER3 +1;m++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
WeightsOH2[m][k] += LRATE*deltaOut*HLayer2Out[k];
}
}
//---Hidden Layer 2
for(k=1;k<NEURONSLAYER2 +1;k++)
{
WeightsH2H1[0][k] += LRATE*deltaLayer2[k];
for(j=1;j<NEURONSLAYER1 +1;j++)
{
WeightsH2H1[k][j] += LRATE*deltaLayer2[k]*HLayer1Out[j];
}
}
//---Hidden Layer 1
for(j=1;j<NEURONSLAYER1 +1;j++)
{
WeightsH1In[0][j] += LRATE*deltaLayer1[j];
for(i=1;i<INPUTS +1;i++)
{
WeightsH1In[j][i] += LRATE*deltaLayer1[j]*HLayer1Out[i];
}
}
}
avg_error = error_sum/l;
}while(avg_error > ERR_LIMIT );
flag = 1;
}
}
}while(read != EOF);
return 0;
}
I'm heaving trouble with a C Program that I'm writing to simulate a Multi-Layer Neural Network. It compiles without any issues, but when I run it, it crashes instantanously. I would be very thankful if anyone could give me a hint. The code is below:
/* Multi layer network to classification of data */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <time.h>
#define MAX_SIZE 1000
#define LRATE 1e-5
#define ERR_LIMIT 1e-3
#define MAX_TIME 55
#define INPUTS 2
#define NEURONSLAYER1 4
#define NEURONSLAYER2 3
#define NEURONSLAYER3 1
#define sqr(x) ((x) * (x))
float max1,max2;
void mapping1(float in[][INPUTS+1], int n) {
int i, j;
if(in[0][1]<0) max1 = (in[0][1])*(-1.0);
else max1=in[0][1];
printf("%f\n", max1);
for(i=1;i<n-1;i++)
{
if(in[i][1]>max1 && in[i][1] < 0)max1 = in[i][1]*(-1.0);
if(in[i][1]>max1 && in[i][1] >= 0)max1 = in[i][1];
}
printf("%f\n", max1);
for(j=0;j<n-1;j++)
{
in[j][1] = in[j][1]/max1;
}
return;
}
void mapping2(float in[][INPUTS+1], int n) {
int i, j;
if(in[0][2]<0) max2 = in[0][2]*(-1.0);
else max2=in[0][2];
printf("%f\n", max2);
for(i=1;i<n-1;i++)
{
if(in[i][2]>max2 && in[i][2] < 0)max2 = in[i][2]*(-1.0);
if(in[i][2]>max2 && in[i][2] >= 0)max2 = in[i][2];
}
printf("%f\n", max2);
for(j=0;j<n-1;j++)
{
in[j][2] = in[j][2]/max2;
}
return;
}
int main (){
//Network parameters
float Input[MAX_SIZE][INPUTS +1];
float HLayer1In[NEURONSLAYER1 +1];
float HLayer1Out[NEURONSLAYER1 +1];
float HLayer2In[NEURONSLAYER2 +1];
float HLayer2Out[NEURONSLAYER2 +1];
float OLayerIn[NEURONSLAYER3 +1];
float OLayerOut[NEURONSLAYER3 +1];
float WeightsH1In[NEURONSLAYER1 +1][INPUTS +1];
float WeightsH2H1[NEURONSLAYER2 +1][NEURONSLAYER1 +1];
float WeightsOH2[NEURONSLAYER3 +1][NEURONSLAYER2 +1];
float BiasO;
//Training Parameters
float lin_error = 0, error_sum = 0, avg_error = 0;
float DevOutH1[NEURONSLAYER1 +1];
float DevOutH2[NEURONSLAYER2 +1];
float DevOutO[NEURONSLAYER3 +1];
int ExpectedOut[MAX_SIZE];
float deltaOut = 0;
float deltaLayer2[NEURONSLAYER2 +1];
float deltaLayer1[NEURONSLAYER1 +1];
float sumLayer1[NEURONSLAYER1 +1];
int i; // Index for Inputs
int j; // Index for Hidden 1
int k; // Index for Hidden 2
int m; // Index for Output
int n,l; // Index for test values
int flag = 0; // Indicates when the training is over
int read; // Loop variable
float c, d; //Store the maximum values for normalization
time_t start = time(NULL); // Timer variable
srand((unsigned)time(NULL));
// Initialize weights
for(i=1;i<INPUTS+1;i++)
{
for(j=0;j<NEURONSLAYER1+1;j++)
{
WeightsH1In[j][i] = (rand()%10)*pow(10,-(rand()%5));
}
}
for(j=1;j<NEURONSLAYER1+1;j++)
{
for(k=0;k<NEURONSLAYER2+1;k++)
{
WeightsH2H1[k][j] = (rand()%10)*pow(10,-(rand()%5));
}
}
for(k=1;k<NEURONSLAYER2+1;k++)
{
for(m=0;k<NEURONSLAYER3+1;m++)
{
WeightsOH2[m][k] = (rand()%10)*pow(10,-(rand()%5));
}
}
BiasO = (rand()%10)*pow(10,-(rand()%5));
n = 0;
l = 0;
j = 0;
k = 0;
m = 0;
i = 0;
do{
if(flag == 1)
{
read = scanf("%f, %f", &Input[n][1], &Input[n][2]);
if(read == EOF) return 0;
//Calculate the Outputs of Neurons in the first Layer and also their derivatives
Input[n][1] = Input[n][1]/c;
Input[n][2] = Input[n][2]/d;
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer1In[j] = WeightsH1In[0][j];
for(i=1;i<INPUTS+1;i++)
{
HLayer1In[j] += Input[l][i]*WeightsH1In[j][i];
}
HLayer1Out[j] = tanh(HLayer1In[j]);
}
//Calculate the Outputs of Neurons in the second Layer and also their derivatives
for(k=1;k<NEURONSLAYER2+k;j++)
{
HLayer2In[k] = WeightsH2H1[0][k];
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer2In[k] += HLayer1Out[j]*WeightsH2H1[k][j];
}
HLayer2Out[k] = tanh(HLayer2In[k]);
}
//Calculate the Outputs of Neurons in the output Layer and also their derivatives
for(m=1;m<NEURONSLAYER3+1;m++)
{
OLayerIn[m] = BiasO;
for(k=1;k<NEURONSLAYER2+1;k++)
{
OLayerIn[m] += HLayer2Out[k]*WeightsOH2[m][k];
}
OLayerOut[m] = tanh(OLayerIn[m]);
}
n++;
if(OLayerOut[m-1] > 0) printf("+1\n");
if(OLayerOut[m-1] <= 0) printf("-1\n");
}
//Training Routine
else
{
//Read Training
read = scanf("%f,%f,%d", &Input[n][1], &Input[n][2], &ExpectedOut[n]);
n++;
//Check condition for initialization of training
if(Input[n-1][1] == 0 && Input[n-1][2] == 0 && ExpectedOut[n-1] == 0)
{
mapping1(Input,n);
c = max1;
mapping2(Input,n);
d = max2;
//Begin Training
do{
//If the program takes too long, break
if((time(NULL) - start) > MAX_TIME)
{
flag = 1;
break;
}
//If the set is over but the training isn't, restart
if(l == (n-1))
{
l=0;
error_sum = 0;
}
//Initiates over the training set
for(l=0;l<n-1;l++)
{
for(j=1;j<NEURONSLAYER1+1;j++)
{
sumLayer1[j] = 0;
}
//Calculate the Outputs of Neurons in the first Layer and also their derivatives
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer1In[j] = WeightsH1In[0][j];
for(i=1;i<INPUTS+1;i++)
{
HLayer1In[j] += Input[l][i]*WeightsH1In[j][i];
}
HLayer1Out[j] = tanh(HLayer1In[j]);
DevOutH1[j] = 1.0/(sqr(cosh(HLayer1In[j])));
}
//Calculate the Outputs of Neurons in the second Layer and also their derivatives
for(k=1;k<NEURONSLAYER2+k;j++)
{
HLayer2In[k] = WeightsH2H1[0][k];
for(j=1;j<NEURONSLAYER1+1;j++)
{
HLayer2In[k] += HLayer1Out[j]*WeightsH2H1[k][j];
}
HLayer2Out[k] = tanh(HLayer2In[k]);
DevOutH2[k] = 1.0/(sqr(cosh(HLayer2In[k])));
}
//Calculate the Outputs of Neurons in the output Layer and also their derivatives
for(m=1;m<NEURONSLAYER3+1;m++)
{
OLayerIn[m] = BiasO;
for(k=1;k<NEURONSLAYER2+1;k++)
{
OLayerIn[m] += HLayer2Out[k]*WeightsOH2[m][k];
}
OLayerOut[m] = tanh(OLayerIn[m]);
DevOutO[m] = 1.0/(sqr(cosh(OLayerIn[m])));
}
//Calculate error
lin_error = ExpectedOut[n] - OLayerOut[m];
error_sum += sqr(lin_error);
deltaOut = lin_error*DevOutO[m];
//Propagate error
for(m=1;m<NEURONSLAYER3 +1;m++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
deltaLayer2[k] = WeightsOH2[m][k]*deltaOut*DevOutH2[k];
}
}
//-----------------------------------------------------------
for(j=1;j<NEURONSLAYER1 +1;j++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
sumLayer1[j]+=WeightsH2H1[k][j]*deltaLayer2[k];
}
}
for(j=1;j<NEURONSLAYER1 +j;k++)
{
deltaLayer1[j]=sumLayer1[j]*DevOutH1[j];
}
//Update weights
//---Output Layer
BiasO += 2.0*LRATE*deltaOut;
for(m=1;m<NEURONSLAYER3 +1;m++)
{
for(k=1;k<NEURONSLAYER2 +1;k++)
{
WeightsOH2[m][k] += LRATE*deltaOut*HLayer2Out[k];
}
}
//---Hidden Layer 2
for(k=1;k<NEURONSLAYER2 +1;k++)
{
WeightsH2H1[0][k] += LRATE*deltaLayer2[k];
for(j=1;j<NEURONSLAYER1 +1;j++)
{
WeightsH2H1[k][j] += LRATE*deltaLayer2[k]*HLayer1Out[j];
}
}
//---Hidden Layer 1
for(j=1;j<NEURONSLAYER1 +1;j++)
{
WeightsH1In[0][j] += LRATE*deltaLayer1[j];
for(i=1;i<INPUTS +1;i++)
{
WeightsH1In[j][i] += LRATE*deltaLayer1[j]*HLayer1Out[i];
}
}
}
avg_error = error_sum/l;
}while(avg_error > ERR_LIMIT );
flag = 1;
}
}
}while(read != EOF);
return 0;
}