73.

C++ Neural Networks and Fuzzy Logic
by Valluru B. Rao
M&T Books, IDG Books Worldwide, Inc.
ISBN: 1558515526   Pub Date: 06/01/95
  

Previous Table of Contents Next


A Source File for C++ Program for an ART1 Model Network

The implementations of the functions declared in the header file are contained in the source file for the C++ program for an ART1 model network. It also has the main function, which contains specifications of the number of neurons in the two layers of the network, the values of the vigilance and other parameters, and the input vectors. Note that if there are n neurons in a layer, they are numbered serially from 0 to n–1, and not from 1 to n in the C++ program. The source file is called art1net.cpp. It is set up with six neurons in the F1 layer and seven neurons in the F2 layer. The main function also contains the parameters needed in the algorithm.

To initialize the bottom-up weights, we set each weight to be –0.1 + L/(m – 1 + L) so that it is greater than 0 and less than L/(m – 1 + L), as suggested before. Similarly, the top-down weights are initialized by setting each of them to 0.2 + (B – 1)/D so it would be greater than (B – 1)/D. Initial activations of the F1 layer neurons are each set to –B/(1 + C), as suggested earlier.

A restrmax function is defined to compute the maximum in an array when one of the array elements is not desired to be a candidate for the maximum. This facilitates the removal of the current winner from competition when reset is needed. Reset is needed when the degree of match is of a smaller magnitude than the vigilance parameter.

The function iterate is a member function of the network class and does the processing for the network. The inqreset function of the network class compares the vigilance parameter with the degree of match.

 //art1net.cpp  V. Rao, H. Rao //Source file for ART1 network program #include "art1net.h" int restrmax(int j,double *b,int k)        {        int i,tmp;        for(i=0;i<j;i++){               if(i !=k)               {tmp = i;               i = j;}               }        for(i=0;i<j;i++){        if( (i != tmp)&&(i != k))          {if(b[i]>b[tmp]) tmp = i;}}        return tmp;        } void artneuron::getnrn(int m1,int m2,int m3, char *y) { int i; name = y; nnbr = m1; outn = m2; inn  = m3; for(i=0;i<outn;++i){        outwt[i] = 0 ;        } output = 0; activation = 0.0; }        void network::getnwk(int k,int l,float aa,float bb,float        cc,float dd,float ll) { anmbr = k; bnmbr = l; ninpt = 0; ai = aa; be = bb; ci = cc; di = dd; el = ll; int i,j; flag = 0; char *y1="ANEURON", *y2="BNEURON" ; for(i=0;i<anmbr;++i){        anrn[i].artneuron::getnrn(i,bnmbr,0,y1);} for(i=0;i<bnmbr;++i){        bnrn[i].artneuron::getnrn(i,0,anmbr,y2);} float tmp1,tmp2,tmp3; tmp1 = 0.2 +(be - 1.0)/di; tmp2 = -0.1 + el/(anmbr - 1.0 +el); tmp3 = - be/(1.0 + ci); for(i=0;i<anmbr;++i){        anrn[i].activation = tmp3;        acts1[i] = tmp3;        for(j=0;j<bnmbr;++j){               mtrx1[i][j]  = tmp1;               mtrx2[j][i] = tmp2;               anrn[i].outwt[j] = mtrx1[i][j];               bnrn[j].outwt[i] = mtrx2[j][i];               }        } prwts1(); prwts2(); practs1(); cout<<"\n"; } int network::winner(int k,double *v,int kk){ int t1; t1 = restrmax(k,v,kk); return t1; } void network::prwts1() { int i3,i4; cout<<"\nweights for F1 layer neurons: \n"; for(i3=0;i3<anmbr;++i3){        for(i4=0;i4<bnmbr;++i4){               cout<<anrn[i3].outwt[i4]<<"  ";}        cout<<"\n"; } cout<<"\n"; } void network::prwts2() { int i3,i4; cout<<"\nweights for F2 layer neurons: \n"; for(i3=0;i3<bnmbr;++i3){        for(i4=0;i4<anmbr;++i4){               cout<<bnrn[i3].outwt[i4]<<"  ";};        cout<<"\n";  } cout<<"\n"; } void network::practs1() { int j; cout<<"\nactivations of F1 layer 
neurons: \n"; for(j=0;j<anmbr;++j){        cout<<acts1[j]<<"   ";} cout<<"\n"; } void network::practs2() { int j; cout<<"\nactivations of F2 layer neurons: \n"; for(j=0;j<bnmbr;++j){        cout<<acts2[j]<<"   ";} cout<<"\n"; } void network::prouts1() { int j; cout<<"\noutputs of F1 layer neurons: \n"; for(j=0;j<anmbr;++j){        cout<<outs1[j]<<"   ";} cout<<"\n"; } void network::prouts2() { int j; cout<<"\noutputs of F2 layer neurons: \n"; for(j=0;j<bnmbr;++j){        cout<<outs2[j]<<"   ";} cout<<"\n"; } void network::asgninpt(int *b) { int j; sj = so = 0; cout<<"\nInput vector is:\n" ; for(j=0;j<anmbr;++j){        cout<<b[j]<<" ";} cout<<"\n"; for(j=0;j<anmbr;++j){        sj += b[j];        anrn[j].activation = b[j]/(1.0 +ci +ai*(b[j]+be));        acts1[j] = anrn[j].activation;        if(anrn[j].activation > 0) anrn[j].output = 1;        else               anrn[j].output = 0;        outs1[j] = anrn[j].output;        so += anrn[j].output;        } practs1(); prouts1(); } void network::inqreset(int t1) { int jj; flag = 0; jj = so/sj; cout<<"\ndegree of match: "<<jj<<" vigilance:  "<<rho<<"\n"; if( jj > rho ) flag = 1;        else        {cout<<"winner is "<<t1;        cout<<" reset required \n";} } void network::comput1(int k) { int j; for(j=0;j<bnmbr;++j){        int ii1;        double c1 = 0.0;        cout<<"\n";        for(ii1=0;ii1<anmbr;++ii1){               c1 += outs1[ii1] * mtrx2[j][ii1];               }        bnrn[j].activation = c1;        acts2[j] = c1;}; winr = winner(bnmbr,acts2,k); cout<<"winner is "<<winr; for(j=0;j<bnmbr;++j){        if(j == winr) bnrn[j].output = 1;        else bnrn[j].output =  0;        outs2[j] = bnrn[j].output;        } practs2(); prouts2(); } void network::comput2(int *b) { double db[MXSIZ]; double tmp; so = 0; int i,j; for(j=0;j<anmbr;++j){        db[j] =0.0;        for(i=0;i<bnmbr;++i){               db[j] += mtrx1[j][i]*outs2[i];};        tmp = b[j] + di*db[j];        acts1[j] = (tmp - be)/(ci +1.0 +ai*tmp);        
anrn[j].activation = acts1[j];        if(anrn[j].activation > 0) anrn[j].output = 1;        else anrn[j].output = 0;        outs1[j] = anrn[j].output;        so += anrn[j].output;        } cout<<"\n"; practs1(); prouts1(); } void network::adjwts1() { int i; for(i=0;i<anmbr;++i){        if(outs1[i] >0) {mtrx1[i][winr]  = 1.0;}        else               {mtrx1[i][winr] = 0.0;}        anrn[i].outwt[winr] = mtrx1[i][winr];} prwts1(); } void network::adjwts2() { int i; cout<<"\nwinner is "<<winr<<"\n"; for(i=0;i<anmbr;++i){        if(outs1[i] > 0) {mtrx2[winr][i] = el/(so + el -1);}        else               {mtrx2[winr][i] = 0.0;}        bnrn[winr].outwt[i]  = mtrx2[winr][i];} prwts2(); } void network::iterate(int *b,float rr,int kk) { int j; rho = rr; flag = 0; asgninpt(b); comput1(kk); comput2(b); inqreset(winr); if(flag == 1){        ninpt ++;        adjwts1();        adjwts2();        int j3;        for(j3=0;j3<anmbr;++j3){               lrndptrn[ninpt][j3] = b[j3];}        prlrndp();        } else        {        for(j=0;j<bnmbr;++j){               outs2[j] = 0;               bnrn[j].output = 0;}        iterate(b,rr,winr);        } } void network::prlrndp() { int j; cout<<"\nlearned vector # "<<ninpt<<"  :\n"; for(j=0;j<anmbr;++j){        cout<<lrndptrn[ninpt][j]<<"  ";} cout<<"\n"; } void main() { int ar = 6, br = 7, rs = 8; float aa = 2.0,bb = 2.5,cc = 6.0,dd = 0.85,ll = 4.0,rr =        0.95; int inptv[][6]={0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,\        1,0}; cout<<"\n\nTHIS PROGRAM IS FOR AN -ADAPTIVE RESONANCE THEORY\        1 - NETWORK.\n"; cout<<"THE NETWORK IS SET UP FOR ILLUSTRATION WITH "<<ar<<" \        INPUT NEURONS,\n"; cout<<" AND "<<br<<" OUTPUT NEURONS.\n"; static network bpn; bpn.getnwk(ar,br,aa,bb,cc,dd,ll) ; bpn.iterate(inptv[0],rr,rs); bpn.iterate(inptv[1],rr,rs); bpn.iterate(inptv[2],rr,rs); bpn.iterate(inptv[3],rr,rs); } 


Previous Table of Contents Next

Copyright © IDG Books Worldwide, Inc.



C++ Neural Networks and Fuzzy Logic
C++ Neural Networks and Fuzzy Logic
ISBN: 1558515526
EAN: 2147483647
Year: 1995
Pages: 139

flylib.com © 2008-2017.
If you have any questions, please contact us: flylib@qtcs.net