
How do I use the BP neural network algorithm source code and demo program?

Answers: 2 · Bounty: 60
Resolved: 2021-02-26 11:44
  • Asked by netizen: 人傍凄凉立暮秋
  • 2021-02-26 07:20
zxl35066008, could you send me a copy of the code? Thanks!
Best answer
  • Five-star knowledge expert, netizen: 像个废品
  • 2021-02-26 08:04
Create an .m M-file in MATLAB, paste the code into it, save it, and run it.
For the demo program, just type demo at the MATLAB command line and you will find it. What is your email address? I only have a simple BP neural network program.
All answers
  • Reply #1, netizen: 洎扰庸人
  • 2021-02-26 09:19
// AnnBP.cpp: implementation of the CAnnBP class.
//
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "AnnBP.h"
#include "math.h"

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

CAnnBP::CAnnBP()
{
    eta1 = 0.3;        // default learning rate
    momentum1 = 0.3;   // default momentum term
}

CAnnBP::~CAnnBP()
{
}

// Random double in [0,1) (BIGRND is assumed to be defined in AnnBP.h)
double CAnnBP::drnd()
{
    return ((double) rand() / (double) BIGRND);
}

// Random double in roughly [-1,1)
double CAnnBP::dpn1()
{
    return (double)(rand()) / (32767 / 2) - 1;
}

// Sigmoid activation function
double CAnnBP::squash(double x)
{
    return (1.0 / (1.0 + exp(-x)));
}

// Allocate a 1D array of n doubles
double* CAnnBP::alloc_1d_dbl(int n)
{
    double *new1;
    new1 = (double *) malloc((unsigned) (n * sizeof(double)));
    if (new1 == NULL) {
        AfxMessageBox("alloc_1d_dbl: couldn't allocate array of doubles\n");
        return (NULL);
    }
    return (new1);
}

// Allocate a 2D (m x n) array of doubles
double** CAnnBP::alloc_2d_dbl(int m, int n)
{
    int i;
    double **new1;
    new1 = (double **) malloc((unsigned) (m * sizeof(double *)));
    if (new1 == NULL) {
        AfxMessageBox("alloc_2d_dbl: couldn't allocate array of dbl ptrs\n");
        return (NULL);
    }
    for (i = 0; i < m; i++) {
        new1[i] = alloc_1d_dbl(n);
    }
    return (new1);
}

// Initialize weights with random values in [-1,1)
void CAnnBP::bpnn_randomize_weights(double **w, int m, int n)
{
    int i, j;
    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = dpn1();
        }
    }
}

// Set all weights to zero
void CAnnBP::bpnn_zero_weights(double **w, int m, int n)
{
    int i, j;
    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = 0.0;
        }
    }
}

// Seed the random number generator
void CAnnBP::bpnn_initialize(int seed)
{
    CString msg, s;
    msg = "random number generator seed:";
    s.Format("%d", seed);
    AfxMessageBox(msg + s);
    srand(seed);
}

// Allocate a network and all of its arrays (index 0 is reserved for the bias units)
BPNN* CAnnBP::bpnn_internal_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;
    newnet = (BPNN *) malloc(sizeof(BPNN));
    if (newnet == NULL) {
        printf("bpnn_create: couldn't allocate neural network\n");
        return (NULL);
    }
    newnet->input_n = n_in;
    newnet->hidden_n = n_hidden;
    newnet->output_n = n_out;
    newnet->input_units = alloc_1d_dbl(n_in + 1);
    newnet->hidden_units = alloc_1d_dbl(n_hidden + 1);
    newnet->output_units = alloc_1d_dbl(n_out + 1);
    newnet->hidden_delta = alloc_1d_dbl(n_hidden + 1);
    newnet->output_delta = alloc_1d_dbl(n_out + 1);
    newnet->target = alloc_1d_dbl(n_out + 1);
    newnet->input_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);
    newnet->input_prev_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_prev_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);
    return (newnet);
}

// Free a network and all of its arrays
void CAnnBP::bpnn_free(BPNN *net)
{
    int n1, n2, i;
    n1 = net->input_n;
    n2 = net->hidden_n;
    free((char *) net->input_units);
    free((char *) net->hidden_units);
    free((char *) net->output_units);
    free((char *) net->hidden_delta);
    free((char *) net->output_delta);
    free((char *) net->target);
    for (i = 0; i <= n1; i++) {
        free((char *) net->input_weights[i]);
        free((char *) net->input_prev_weights[i]);
    }
    free((char *) net->input_weights);
    free((char *) net->input_prev_weights);
    for (i = 0; i <= n2; i++) {
        free((char *) net->hidden_weights[i]);
        free((char *) net->hidden_prev_weights[i]);
    }
    free((char *) net->hidden_weights);
    free((char *) net->hidden_prev_weights);
    free((char *) net);
}

// Create a new network with randomized weights (zeroed input weights if INITZERO is defined)
BPNN* CAnnBP::bpnn_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;
    newnet = bpnn_internal_create(n_in, n_hidden, n_out);
#ifdef INITZERO
    bpnn_zero_weights(newnet->input_weights, n_in, n_hidden);
#else
    bpnn_randomize_weights(newnet->input_weights, n_in, n_hidden);
#endif
    bpnn_randomize_weights(newnet->hidden_weights, n_hidden, n_out);
    bpnn_zero_weights(newnet->input_prev_weights, n_in, n_hidden);
    bpnn_zero_weights(newnet->hidden_prev_weights, n_hidden, n_out);
    return (newnet);
}

// Forward-propagate one layer: l2[j] = squash(sum_k conn[k][j] * l1[k]), with l1[0] as the bias unit
void CAnnBP::bpnn_layerforward(double *l1, double *l2, double **conn, int n1, int n2)
{
    double sum;
    int j, k;
    l1[0] = 1.0;
    for (j = 1; j <= n2; j++) {
        sum = 0.0;
        for (k = 0; k <= n1; k++) {
            sum += conn[k][j] * l1[k];
        }
        l2[j] = squash(sum);
    }
}

// Output-layer error: delta = o * (1 - o) * (t - o)
void CAnnBP::bpnn_output_error(double *delta, double *target, double *output, int nj, double *err)
{
    int j;
    double o, t, errsum;
    errsum = 0.0;
    for (j = 1; j <= nj; j++) {
        o = output[j];
        t = target[j];
        delta[j] = o * (1.0 - o) * (t - o);
        errsum += fabs(delta[j]);   // fabs, not abs: delta[j] is a double
    }
    *err = errsum;
}

// Hidden-layer error, back-propagated from the output deltas
void CAnnBP::bpnn_hidden_error(double *delta_h, int nh, double *delta_o, int no, double **who, double *hidden, double *err)
{
    int j, k;
    double h, sum, errsum;
    errsum = 0.0;
    for (j = 1; j <= nh; j++) {
        h = hidden[j];
        sum = 0.0;
        for (k = 1; k <= no; k++) {
            sum += delta_o[k] * who[j][k];
        }
        delta_h[j] = h * (1.0 - h) * sum;
        errsum += fabs(delta_h[j]);   // fabs, not abs: delta_h[j] is a double
    }
    *err = errsum;
}

// Gradient-descent weight update with momentum
void CAnnBP::bpnn_adjust_weights(double *delta, int ndelta, double *ly, int nly, double **w, double **oldw, double eta, double momentum)
{
    double new_dw;
    int k, j;
    ly[0] = 1.0;
    for (j = 1; j <= ndelta; j++) {
        for (k = 0; k <= nly; k++) {
            new_dw = ((eta * delta[j] * ly[k]) + (momentum * oldw[k][j]));
            w[k][j] += new_dw;
            oldw[k][j] = new_dw;
        }
    }
}

// Run one forward pass through the whole network
void CAnnBP::bpnn_feedforward(BPNN *net)
{
    int in, hid, out;
    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;
    bpnn_layerforward(net->input_units, net->hidden_units, net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units, net->hidden_weights, hid, out);
}

// One training step: forward pass, error computation, backward weight updates
void CAnnBP::bpnn_train(BPNN *net, double eta, double momentum, double *eo, double *eh)
{
    int in, hid, out;
    double out_err, hid_err;
    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;
    bpnn_layerforward(net->input_units, net->hidden_units, net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units, net->hidden_weights, hid, out);
    bpnn_output_error(net->output_delta, net->target, net->output_units, out, &out_err);
    bpnn_hidden_error(net->hidden_delta, hid, net->output_delta, out, net->hidden_weights, net->hidden_units, &hid_err);
    *eo = out_err;
    *eh = hid_err;
    bpnn_adjust_weights(net->output_delta, out, net->hidden_units, hid, net->hidden_weights, net->hidden_prev_weights, eta, momentum);
    bpnn_adjust_weights(net->hidden_delta, hid, net->input_units, in, net->input_weights, net->input_prev_weights, eta, momentum);
}

// Save the network weights to a binary file
// (fastcopy is assumed to be a memcpy-style macro defined in AnnBP.h)
void CAnnBP::bpnn_save(BPNN *net, char *filename)
{
    CFile file;
    char *mem;
    int n1, n2, n3, i, j, memcnt;
    double dvalue, **w;
    n1 = net->input_n;
    n2 = net->hidden_n;
    n3 = net->output_n;
    printf("saving %dx%dx%d network to '%s'\n", n1, n2, n3, filename);
    try {
        file.Open(filename, CFile::modeWrite | CFile::modeCreate | CFile::modeNoTruncate);
    }
    catch (CFileException* e) {
        e->ReportError();
        e->Delete();
    }
    file.Write(&n1, sizeof(int));
    file.Write(&n2, sizeof(int));
    file.Write(&n3, sizeof(int));

    // input-to-hidden weights
    memcnt = 0;
    w = net->input_weights;
    mem = (char *) malloc((unsigned) ((n1 + 1) * (n2 + 1) * sizeof(double)));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem, sizeof(double) * (n1 + 1) * (n2 + 1));
    free(mem);

    // hidden-to-output weights
    memcnt = 0;
    w = net->hidden_weights;
    mem = (char *) malloc((unsigned) ((n2 + 1) * (n3 + 1) * sizeof(double)));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem, (n2 + 1) * (n3 + 1) * sizeof(double));
    free(mem);
    file.Close();
    return;
}

// Read a network back from a binary file written by bpnn_save
BPNN* CAnnBP::bpnn_read(char *filename)
{
    char *mem;
    BPNN *new1;
    int n1, n2, n3, i, j, memcnt;
    CFile file;
    try {
        file.Open(filename, CFile::modeRead | CFile::modeCreate | CFile::modeNoTruncate);
    }
    catch (CFileException* e) {
        e->ReportError();
        e->Delete();
    }
    file.Read(&n1, sizeof(int));
    file.Read(&n2, sizeof(int));
    file.Read(&n3, sizeof(int));
    new1 = bpnn_internal_create(n1, n2, n3);

    // input-to-hidden weights
    memcnt = 0;
    mem = (char *) malloc(((n1 + 1) * (n2 + 1) * sizeof(double)));
    file.Read(mem, ((n1 + 1) * (n2 + 1)) * sizeof(double));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            fastcopy(&(new1->input_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);

    // hidden-to-output weights
    memcnt = 0;
    mem = (char *) malloc(((n2 + 1) * (n3 + 1) * sizeof(double)));
    file.Read(mem, (n2 + 1) * (n3 + 1) * sizeof(double));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            fastcopy(&(new1->hidden_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);
    file.Close();
    printf("done\n");
    bpnn_zero_weights(new1->input_prev_weights, n1, n2);
    bpnn_zero_weights(new1->hidden_prev_weights, n2, n3);
    return (new1);
}

//////////////////////////////////////////////////////////////////////
// Public wrapper interface
//////////////////////////////////////////////////////////////////////

void CAnnBP::CreateBP(int n_in, int n_hidden, int n_out)
{
    net = bpnn_create(n_in, n_hidden, n_out);
}

void CAnnBP::FreeBP()
{
    bpnn_free(net);
}

// Train on one (input, target) pair; eo and eh return the output and hidden error sums
void CAnnBP::Train(double *input_unit, int input_num, double *target, int target_num, double *eo, double *eh)
{
    for (int i = 1; i <= input_num; i++) {
        net->input_units[i] = input_unit[i - 1];
    }
    for (int j = 1; j <= target_num; j++) {
        net->target[j] = target[j - 1];
    }
    bpnn_train(net, eta1, momentum1, eo, eh);
}

// Run a forward pass and copy the network outputs into target
void CAnnBP::Identify(double *input_unit, int input_num, double *target, int target_num)
{
    for (int i = 1; i <= input_num; i++) {
        net->input_units[i] = input_unit[i - 1];
    }
    bpnn_feedforward(net);
    for (int j = 1; j <= target_num; j++) {
        target[j - 1] = net->output_units[j];
    }
}

void CAnnBP::Save(char *filename)
{
    bpnn_save(net, filename);
}

void CAnnBP::Read(char *filename)
{
    net = bpnn_read(filename);
}

void CAnnBP::SetBParm(double eta, double momentum)
{
    eta1 = eta;
    momentum1 = momentum;
}

void CAnnBP::Initialize(int seed)
{
    bpnn_initialize(seed);
}
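For reference, here is a minimal sketch of how the CAnnBP class above could be driven, training a small network on XOR. The 2-3-1 topology, learning parameters, epoch count, the function name TrainXorDemo, and the file name xor.net are illustrative assumptions, not part of the original reply; the matching class declaration is assumed to live in AnnBP.h.

// Hypothetical usage sketch for the CAnnBP class above.
// All numbers (topology, epochs, parameters) are illustrative only.
#include "stdafx.h"
#include "AnnBP.h"

void TrainXorDemo()
{
    double inputs[4][2]  = { {0,0}, {0,1}, {1,0}, {1,1} };
    double targets[4][1] = { {0},   {1},   {1},   {0}   };

    CAnnBP bp;
    bp.Initialize(1);          // seed the random number generator
    bp.SetBParm(0.3, 0.3);     // learning rate eta and momentum
    bp.CreateBP(2, 3, 1);      // 2 inputs, 3 hidden units, 1 output

    double eo = 0.0, eh = 0.0;
    for (int epoch = 0; epoch < 10000; epoch++) {
        for (int p = 0; p < 4; p++) {
            bp.Train(inputs[p], 2, targets[p], 1, &eo, &eh);
        }
    }

    double out[1];
    for (int p = 0; p < 4; p++) {
        bp.Identify(inputs[p], 2, out, 1);
        printf("%g XOR %g -> %g\n", inputs[p][0], inputs[p][1], out[0]);
    }

    bp.Save("xor.net");        // persist the trained weights to disk
    bp.FreeBP();
}

Whether 3 hidden units and 10000 epochs are enough depends on the random initial weights, so treat these numbers only as a starting point and watch the eo/eh error values returned by Train.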