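/*
 * nxt.c
 *
 * Trains a small feed-forward neural network (2 inputs, 4 hidden
 * units, 1 output) on the XOR problem using back-propagation with
 * momentum. Originally adapted for the LEGO NXT brick; the NXC
 * entry point and display calls are left below as comments, and the
 * file builds as standard C.
 */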
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define NUMPAT 4    /* number of training patterns */
#define NUMIN  2    /* number of input units */
#define NUMHID 4    /* number of hidden units */
#define NUMOUT 1    /* number of output units */

/* Uniform random number in [0, 1). The casts matter: plain integer
   division by RAND_MAX + 1 would always yield 0 (and RAND_MAX + 1
   overflows a signed int). */
#define rando() ((double)rand() / ((double)RAND_MAX + 1.0))
/* Global declarations */
int i, j, k, p, np, op, ranpat[NUMPAT+1], epoch;
int NumPattern = NUMPAT, NumInput = NUMIN, NumHidden = NUMHID, NumOutput = NUMOUT;

/* Training set: XOR. Row 0 and column 0 of each array are unused so
   that patterns and units can be indexed from 1. */
float Input[NUMPAT+1][NUMIN+1]   = { {0, 0, 0}, {0, 0, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 1} };
float Target[NUMPAT+1][NUMOUT+1] = { {0, 0}, {0, 0}, {0, 1}, {0, 1}, {0, 0} };

float SumH[NUMPAT+1][NUMHID+1], WeightIH[NUMIN+1][NUMHID+1], Hidden[NUMPAT+1][NUMHID+1];
float SumO[NUMPAT+1][NUMOUT+1], WeightHO[NUMHID+1][NUMOUT+1], Output[NUMPAT+1][NUMOUT+1];
float DeltaO[NUMOUT+1], SumDOW[NUMHID+1], DeltaH[NUMHID+1];
float DeltaWeightIH[NUMIN+1][NUMHID+1], DeltaWeightHO[NUMHID+1][NUMOUT+1];
float Error, eta = 0.5, alpha = 0.9, smallwt = 0.5;    /* learning rate, momentum, initial weight scale */
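/*
 * Forward pass (index 0 of each weight array holds the bias):
 *   Hidden[p][j] = sigmoid( WeightIH[0][j] + sum_i Input[p][i]  * WeightIH[i][j] )
 *   Output[p][k] = sigmoid( WeightHO[0][k] + sum_j Hidden[p][j] * WeightHO[j][k] )
 * where sigmoid(x) = 1 / (1 + exp(-x)). Training minimizes the
 * sum-squared error  Error = 0.5 * sum (Target - Output)^2  by
 * gradient descent with momentum (eta = step size, alpha = momentum).
 */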
/* Initialize the neural network weights to small random values */
void Initialize_Net()
{
    for( j = 1 ; j <= NumHidden ; j++ ) {    /* initialize WeightIH and DeltaWeightIH */
        for( i = 0 ; i <= NumInput ; i++ ) {
            DeltaWeightIH[i][j] = 0.0 ;
            WeightIH[i][j] = 2.0 * ( rando() - 0.5 ) * smallwt ;    /* uniform in (-smallwt, smallwt) */
        }
    }
    for( k = 1 ; k <= NumOutput ; k++ ) {    /* initialize WeightHO and DeltaWeightHO */
        for( j = 0 ; j <= NumHidden ; j++ ) {
            DeltaWeightHO[j][k] = 0.0 ;
            WeightHO[j][k] = 2.0 * ( rando() - 0.5 ) * smallwt ;    /* uniform in (-smallwt, smallwt) */
        }
    }
}
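/*
 * Training: one epoch is a pass over all patterns in a freshly
 * shuffled order, with the weights updated after every pattern
 * (online/stochastic gradient descent rather than batch updates).
 */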
/* task main() {  -- original NXC entry point on the NXT brick */
int main() {
    Initialize_Net();
    for( epoch = 0 ; epoch < 100 ; epoch++ ) {    /* iterate weight updates */
        for( p = 1 ; p <= NumPattern ; p++ ) {    /* randomize order of training patterns */
            ranpat[p] = p ;
        }
        for( p = 1 ; p <= NumPattern ; p++ ) {    /* Fisher-Yates shuffle of ranpat[] */
            np = p + rando() * ( NumPattern + 1 - p ) ;
            op = ranpat[p] ; ranpat[p] = ranpat[np] ; ranpat[np] = op ;
        }
        Error = 0.0 ;
        for( np = 1 ; np <= NumPattern ; np++ ) {    /* repeat for all the training patterns */
            p = ranpat[np] ;
            for( j = 1 ; j <= NumHidden ; j++ ) {    /* compute hidden unit activations */
                SumH[p][j] = WeightIH[0][j] ;
                for( i = 1 ; i <= NumInput ; i++ ) {
                    SumH[p][j] += Input[p][i] * WeightIH[i][j] ;
                }
                Hidden[p][j] = 1.0 / (1.0 + exp(-SumH[p][j])) ;    /* sigmoid */
            }
            for( k = 1 ; k <= NumOutput ; k++ ) {    /* compute output unit activations and errors */
                SumO[p][k] = WeightHO[0][k] ;
                for( j = 1 ; j <= NumHidden ; j++ ) {
                    SumO[p][k] += Hidden[p][j] * WeightHO[j][k] ;
                }
                Output[p][k] = 1.0 / (1.0 + exp(-SumO[p][k])) ;    /* sigmoidal outputs */
                /* Output[p][k] = SumO[p][k] ;  linear outputs */
                Error += 0.5 * (Target[p][k] - Output[p][k]) * (Target[p][k] - Output[p][k]) ;    /* SSE */
                /* Error -= ( Target[p][k] * log( Output[p][k] ) + ( 1.0 - Target[p][k] ) * log( 1.0 - Output[p][k] ) ) ;  cross-entropy error */
                DeltaO[k] = (Target[p][k] - Output[p][k]) * Output[p][k] * (1.0 - Output[p][k]) ;    /* sigmoidal outputs, SSE */
                /* DeltaO[k] = Target[p][k] - Output[p][k] ;  sigmoidal outputs with cross-entropy, or linear outputs with SSE */
            }
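            /*
             * Chain rule: each hidden unit's error signal is the sum of
             * the output deltas it feeds, weighted by its outgoing
             * weights, times the sigmoid derivative Hidden * (1 - Hidden).
             */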
            for( j = 1 ; j <= NumHidden ; j++ ) {    /* back-propagate errors to hidden layer */
                SumDOW[j] = 0.0 ;
                for( k = 1 ; k <= NumOutput ; k++ ) {
                    SumDOW[j] += WeightHO[j][k] * DeltaO[k] ;
                }
                DeltaH[j] = SumDOW[j] * Hidden[p][j] * (1.0 - Hidden[p][j]) ;
            }
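            /*
             * Weight update with momentum:
             *   DeltaW = eta * (upstream activation) * delta + alpha * previous DeltaW
             * Index 0 is the bias weight, whose upstream activation is implicitly 1.
             */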
            for( j = 1 ; j <= NumHidden ; j++ ) {    /* update weights WeightIH */
                DeltaWeightIH[0][j] = eta * DeltaH[j] + alpha * DeltaWeightIH[0][j] ;
                WeightIH[0][j] += DeltaWeightIH[0][j] ;
                for( i = 1 ; i <= NumInput ; i++ ) {
                    DeltaWeightIH[i][j] = eta * Input[p][i] * DeltaH[j] + alpha * DeltaWeightIH[i][j] ;
                    WeightIH[i][j] += DeltaWeightIH[i][j] ;
                }
            }
            for( k = 1 ; k <= NumOutput ; k++ ) {    /* update weights WeightHO */
                DeltaWeightHO[0][k] = eta * DeltaO[k] + alpha * DeltaWeightHO[0][k] ;
                WeightHO[0][k] += DeltaWeightHO[0][k] ;
                for( j = 1 ; j <= NumHidden ; j++ ) {
                    DeltaWeightHO[j][k] = eta * Hidden[p][j] * DeltaO[k] + alpha * DeltaWeightHO[j][k] ;
                    WeightHO[j][k] += DeltaWeightHO[j][k] ;
                }
            }
        }
        if( epoch % 100 == 0 ) {    /* report progress periodically */
            fprintf(stdout, "\nEpoch %-5d : Error = %f", epoch, Error) ;
        }
        if( Error < 0.0004 ) break ;    /* stop learning when 'near enough' */
    }
    fprintf(stdout, "\n\nNETWORK DATA - EPOCH %d\n\nPat\t", epoch) ;    /* print network outputs */
    for( i = 1 ; i <= NumInput ; i++ ) {
        fprintf(stdout, "Input%-4d\t", i) ;
    }
    for( k = 1 ; k <= NumOutput ; k++ ) {
        fprintf(stdout, "Target%-4d\tOutput%-4d\t", k, k) ;
    }
    for( p = 1 ; p <= NumPattern ; p++ ) {
        fprintf(stdout, "\n%d\t", p) ;
        for( i = 1 ; i <= NumInput ; i++ ) {
            fprintf(stdout, "%f\t", Input[p][i]) ;
        }
        for( k = 1 ; k <= NumOutput ; k++ ) {
            fprintf(stdout, "%f\t%f\t", Target[p][k], Output[p][k]) ;
            /* NXC version: string str = NumToStr(Output[p][k]) ; TextOut(0, k, str) ; */
        }
    }
    fprintf(stdout, "\n") ;
    /* Wait(40000) ;  NXC call that kept the results on the NXT display */
    return 0 ;
}
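/*
 * Example build and run (assuming a POSIX C toolchain; -lm links the
 * math library for exp()):
 *   cc -o nxt nxt.c -lm && ./nxt
 */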