C Implementation of a BP Neural Network

Below are the header file (backprop.h) and the implementation of a simple three-layer back-propagation (BP) network in C.

backprop.h:

#ifndef _BACKPROP_H_

#define _BACKPROP_H_

#define BIGRND 0x7fffffff

 

typedef struct {
int input_n;                  /* number of input units */
int hidden_n;                 /* number of hidden units */
int output_n;                 /* number of output units */

double *input_units;          /* the input units */
double *hidden_units;         /* the hidden units */
double *output_units;         /* the output units */

double *hidden_delta;         /* storage for hidden-unit error */
double *output_delta;         /* storage for output-unit error */

double *target;               /* storage for the target vector */

double **input_weights;       /* weights from input to hidden layer */
double **hidden_weights;      /* weights from hidden to output layer */

                              /* the next two are for the momentum term */
double **input_prev_weights;  /* previous weight change, input to hidden */
double **hidden_prev_weights; /* previous weight change, hidden to output */
} BPNN;



/* Seed the random number generator. */
void Init(int seed);

/* Allocate a network with n_in inputs, n_hidden hidden units and n_out outputs. */
BPNN *Create(int n_in, int n_hidden, int n_out);

/* Release all memory owned by the network. */
void Free(BPNN *net);

/* Run one forward/backward pass; eo and eh receive the output- and hidden-layer error sums. */
void Train(BPNN *net, double eta, double momentum, double *eo, double *eh);

/* Propagate the current input_units through the network. */
void FeedForward(BPNN *net);

/* Write the network dimensions and weights to a binary file. */
void Save(BPNN *net, char *filename);

/* Load a network previously written by Save(). */
BPNN *Read(char *filename);


#endif

 

 

Implementation file:

//#include "StdAfx.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "backprop.h"

#define ABS(x)          (((x) > 0.0) ? (x) : (-(x)))


/* Copy 'len' bytes from 'from' to 'to'. */
#define fastcopy(to,from,len)\
{\
register char *_to,*_from;\
register int _i,_l;\
_to = (char *)(to);\
_from = (char *)(from);\
_l = (len);\
for (_i = 0; _i < _l; _i++) *_to++ = *_from++;\
}


/* Return a random double in [0.0, 1.0]; rand() yields values in [0, RAND_MAX]. */
double drnd()
{
return ((double) rand() / (double) RAND_MAX);
}


/* Return a random double in [-1.0, 1.0]. */
double dpn1()
{
return ((drnd() * 2.0) - 1.0);
}

 

/* The sigmoid (logistic) activation function. */
double squash(double x)
{
return (1.0 / (1.0 + exp(-x)));
}


/* Allocate a 1-D array of n doubles. */
double *Alloc1D(int n)
{
double *new1;

new1 = (double *) malloc ((unsigned) (n * sizeof (double)));
if (new1 == NULL) {
   printf("Alloc1D: Couldn't allocate array of doubles\n");
   return (NULL);
}
return (new1);
}


/* Allocate an m-by-n 2-D array of doubles as an array of row pointers. */
double **Alloc2D(int m, int n)
{
int i;
double **new1;

new1 = (double **) malloc ((unsigned) (m * sizeof (double *)));
if (new1 == NULL) {
   printf("Alloc2D: Couldn't allocate array of dbl ptrs\n");
   return (NULL);
}

for (i = 0; i < m; i++) {
   new1[i] = Alloc1D(n);
}

return (new1);
}


/* Fill the (m+1) x (n+1) weight matrix with random values in [-1, 1]. */
void RandWeights(double **w, int m, int n)
{
int i, j;

for (i = 0; i <= m; i++) {
   for (j = 0; j <= n; j++) {
    w[i][j] = dpn1();
   }
}
}


/* Zero the (m+1) x (n+1) weight matrix. */
void ZeroWeights(double **w, int m, int n)
{
int i, j;

for (i = 0; i <= m; i++) {
   for (j = 0; j <= n; j++) {
    w[i][j] = 0.0;
   }
}
}


void Init(int seed)
{
printf("Random number generator seed: %d\n", seed);
srand(seed);
}


/* Allocate a network and all of its internal arrays; weights are not yet initialized. */
BPNN *InterCreate(int n_in, int n_hidden, int n_out)
{
BPNN *newnet;

newnet = (BPNN *) malloc (sizeof (BPNN));
if (newnet == NULL) {
   printf("Create: Couldn't allocate neural network\n");
   return (NULL);
}

newnet->input_n = n_in;
newnet->hidden_n = n_hidden;
newnet->output_n = n_out;
newnet->input_units = Alloc1D(n_in + 1);
newnet->hidden_units = Alloc1D(n_hidden + 1);
newnet->output_units = Alloc1D(n_out + 1);

newnet->hidden_delta = Alloc1D(n_hidden + 1);
newnet->output_delta = Alloc1D(n_out + 1);
newnet->target = Alloc1D(n_out + 1);

newnet->input_weights = Alloc2D(n_in + 1, n_hidden + 1);
newnet->hidden_weights = Alloc2D(n_hidden + 1, n_out + 1);

newnet->input_prev_weights = Alloc2D(n_in + 1, n_hidden + 1);
newnet->hidden_prev_weights = Alloc2D(n_hidden + 1, n_out + 1);

return (newnet);
}


void Free(BPNN *net)
{
int n1, n2, i;

n1 = net->input_n;
n2 = net->hidden_n;

free((char *) net->input_units);
free((char *) net->hidden_units);
free((char *) net->output_units);

free((char *) net->hidden_delta);
free((char *) net->output_delta);
free((char *) net->target);

for (i = 0; i <= n1; i++) {
   free((char *) net->input_weights[i]);
   free((char *) net->input_prev_weights[i]);
}
free((char *) net->input_weights);
free((char *) net->input_prev_weights);

for (i = 0; i <= n2; i++) {
   free((char *) net->hidden_weights[i]);
   free((char *) net->hidden_prev_weights[i]);
}
free((char *) net->hidden_weights);
free((char *) net->hidden_prev_weights);

free((char *) net);
}


/* Create a network and initialize its weights. */
BPNN *Create(int n_in, int n_hidden, int n_out)
{
BPNN *newnet;

newnet = InterCreate(n_in, n_hidden, n_out);
if (newnet == NULL) {
   return (NULL);
}

#ifdef INITZERO
ZeroWeights(newnet->input_weights, n_in, n_hidden);
#else
RandWeights(newnet->input_weights, n_in, n_hidden);
#endif
RandWeights(newnet->hidden_weights, n_hidden, n_out);
ZeroWeights(newnet->input_prev_weights, n_in, n_hidden);
ZeroWeights(newnet->hidden_prev_weights, n_hidden, n_out);

return (newnet);
}

 

/* Forward-propagate layer l1 (n1 units) into layer l2 (n2 units) through the
   connection matrix conn; index 0 of the source layer serves as the bias unit. */
void LyrForward(double *l1, double *l2, double **conn, int n1, int n2)
{
double sum;
int j, k;


l1[0] = 1.0;


for (j = 1; j <= n2; j++) {

  
   sum = 0.0;
   for (k = 0; k <= n1; k++) {
    sum += conn[k][j] * l1[k];
   }
   l2[j] = squash(sum);
}
}


/* Output-layer error: delta_j = o_j * (1 - o_j) * (t_j - o_j);
   *err receives the sum of the absolute deltas. */
void OutErr(double *delta, double *target, double *output, int nj, double *err)
{
int j;
double o, t, errsum;

errsum = 0.0;
for (j = 1; j <= nj; j++) {
   o = output[j];
   t = target[j];
   delta[j] = o * (1.0 - o) * (t - o);
   errsum += ABS(delta[j]);
}
*err = errsum;
}


/* Hidden-layer error: back-propagate the output deltas through the
   hidden-to-output weights who, scaled by the sigmoid derivative h * (1 - h). */
void HiddenErr(double *delta_h, int nh, double *delta_o, int no, double **who, double *hidden, double *err)
{
int j, k;
double h, sum, errsum;

errsum = 0.0;
for (j = 1; j <= nh; j++) {
   h = hidden[j];
   sum = 0.0;
   for (k = 1; k <= no; k++) {
    sum += delta_o[k] * who[j][k];
   }
   delta_h[j] = h * (1.0 - h) * sum;
   errsum += ABS(delta_h[j]);
}
*err = errsum;
}


/* Update the weights feeding the delta layer:
   dw = eta * delta * activation + momentum * previous_dw. */
void AdjWeights(double *delta, int ndelta, double *ly, int nly, double **w, double **oldw, double eta, double momentum)
{
double new_dw;
int k, j;

ly[0] = 1.0;
for (j = 1; j <= ndelta; j++) {
   for (k = 0; k <= nly; k++) {
    new_dw = ((eta * delta[j] * ly[k]) + (momentum * oldw[k][j]));
    w[k][j] += new_dw;
    oldw[k][j] = new_dw;
   }
}
}


void FeedForward(BPNN* net)
{
int in, hid, out;

in = net->input_n;
hid = net->hidden_n;
out = net->output_n;


LyrForward(net->input_units, net->hidden_units,
   net->input_weights, in, hid);
LyrForward(net->hidden_units, net->output_units,
   net->hidden_weights, hid, out);
}


void Train(BPNN *net, double eta, double momentum, double *eo, double *eh)
{
int in, hid, out;
double out_err, hid_err;

in = net->input_n;
hid = net->hidden_n;
out = net->output_n;


LyrForward(net->input_units, net->hidden_units,
   net->input_weights, in, hid);
LyrForward(net->hidden_units, net->output_units,
   net->hidden_weights, hid, out);


OutErr(net->output_delta, net->target, net->output_units,
   out, &out_err);
HiddenErr(net->hidden_delta, hid, net->output_delta, out,
   net->hidden_weights, net->hidden_units, &hid_err);
*eo = out_err;
*eh = hid_err;


AdjWeights(net->output_delta, out, net->hidden_units, hid,
   net->hidden_weights, net->hidden_prev_weights, eta, momentum);
AdjWeights(net->hidden_delta, hid, net->input_units, in,
   net->input_weights, net->input_prev_weights, eta, momentum);
}



void Save(BPNN *net, char *filename)
{
int n1, n2, n3, i, j, memcnt;
double dvalue, **w;
char *mem;
FILE *fd;

if ((fd = fopen(filename, "wb")) == NULL) {   /* binary mode: the file holds raw ints and doubles */
   printf("Save: Cannot create '%s'\n", filename);
   return;
}

n1 = net->input_n; n2 = net->hidden_n; n3 = net->output_n;
printf("Saving %dx%dx%d network to '%s'\n", n1, n2, n3, filename);
fflush(stdout);


fwrite((char *) &n1, sizeof(int), 1, fd);
fwrite((char *) &n2, sizeof(int), 1, fd);
fwrite((char *) &n3, sizeof(int), 1, fd);

memcnt = 0;
w = net->input_weights;
mem = (char *) malloc ((unsigned) ((n1+1) * (n2+1) * sizeof(double)));
for (i = 0; i <= n1; i++) {
   for (j = 0; j <= n2; j++) {
    dvalue = w[i][j];
    fastcopy(&mem[memcnt], &dvalue, sizeof(double));
    memcnt += sizeof(double);
   }
}

fwrite(mem, (n1+1) * (n2+1) * sizeof(double), 1, fd);
free(mem);

memcnt = 0;
w = net->hidden_weights;
mem = (char *) malloc ((unsigned) ((n2+1) * (n3+1) * sizeof(double)));
for (i = 0; i <= n2; i++) {
   for (j = 0; j <= n3; j++) {
    dvalue = w[i][j];
    fastcopy(&mem[memcnt], &dvalue, sizeof(double));
    memcnt += sizeof(double);
   }
}

fwrite(mem, (n2+1) * (n3+1) * sizeof(double), 1, fd);
free(mem);

fclose(fd);
return;
}


BPNN *Read(char *filename)
{
char *mem;
BPNN *new1;
int n1, n2, n3, i, j, memcnt;
FILE *fd;

if ((fd = fopen(filename, "rb")) == NULL) {   /* binary mode, to match Save() */
   return (NULL);
}

printf("Reading '%s'\n", filename); fflush(stdout);

fread((char *) &n1, sizeof(int), 1, fd);
fread((char *) &n2, sizeof(int), 1, fd);
fread((char *) &n3, sizeof(int), 1, fd);

new1 = InterCreate(n1, n2, n3);

printf("'%s' contains a %dx%dx%d network\n", filename, n1, n2, n3);
printf("Reading input weights..."); fflush(stdout);

memcnt = 0;
mem = (char *) malloc ((unsigned) ((n1+1) * (n2+1) * sizeof(double)));

fread( mem, (n1+1) * (n2+1) * sizeof(double), 1, fd);
for (i = 0; i <= n1; i++) {
   for (j = 0; j <= n2; j++) {
    fastcopy(&(new1->input_weights[i][j]), &mem[memcnt], sizeof(double));
    memcnt += sizeof(double);
   }
}
free(mem);

printf("Done\nReading hidden weights..."); fflush(stdout);

memcnt = 0;
mem = (char *) malloc ((unsigned) ((n2+1) * (n3+1) * sizeof(double)));

fread( mem, (n2+1) * (n3+1) * sizeof(double), 1, fd);
for (i = 0; i <= n2; i++) {
   for (j = 0; j <= n3; j++) {
    fastcopy(&(new1->hidden_weights[i][j]), &mem[memcnt], sizeof(double));
    memcnt += sizeof(double);
   }
}
free(mem);
fclose(fd);

printf("Done\n"); fflush(stdout);

ZeroWeights(new1->input_prev_weights, n1, n2);
ZeroWeights(new1->hidden_prev_weights, n2, n3);

return (new1);
}
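
The original post ends with the library code itself. As a rough usage sketch (not part of the original; the XOR data set, the 2-4-1 layer sizes, the seed, and the eta/momentum values of 0.3 are arbitrary illustrative choices), a driver compiled together with the code above might look like this:

/* xor_demo.c -- illustrative driver for the API above (hypothetical file name). */
#include <stdio.h>
#include "backprop.h"

int main(void)
{
    /* XOR truth table: 2 inputs, 1 output. */
    static const double x[4][2] = { {0,0}, {0,1}, {1,0}, {1,1} };
    static const double y[4]    = {  0,     1,     1,     0   };
    BPNN *net;
    double eo, eh;
    int epoch, s;

    Init(1234);                 /* seed the random number generator */
    net = Create(2, 4, 1);      /* 2-4-1 network */
    if (net == NULL) return 1;

    for (epoch = 0; epoch < 10000; epoch++) {
        for (s = 0; s < 4; s++) {
            /* unit and target arrays are 1-based; index 0 is the bias slot */
            net->input_units[1] = x[s][0];
            net->input_units[2] = x[s][1];
            net->target[1]      = y[s];
            Train(net, 0.3, 0.3, &eo, &eh);
        }
    }

    for (s = 0; s < 4; s++) {
        net->input_units[1] = x[s][0];
        net->input_units[2] = x[s][1];
        FeedForward(net);
        printf("%g XOR %g -> %.3f\n", x[s][0], x[s][1], net->output_units[1]);
    }

    Save(net, "xor.net");
    Free(net);
    return 0;
}

Note that the unit and target arrays are 1-based: LyrForward and AdjWeights reserve index 0 of each layer for the bias term, which is why the driver writes to input_units[1] and input_units[2] rather than [0] and [1].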
