Experiment Report

Course:          Artificial Intelligence Application Technology
Experiment:      Artificial Neural Network Program Design
Environment:     Windows XP, Visual C++
School:          School of Information Management
Major:           Information Security
Class / ID:      信安1401
Student name:    Cony
Experiment date: 2016-05-10
Grade:
Instructor:      Zhao Gang

Beijing Information Science and Technology University, School of Information Management, (course lab) experiment report
Course: Artificial Intelligence Application Technology   Major: Information Security   Class:        ID:        Name:
Experiment: Artificial Neural Network Program Design   Location: school computer lab   Time: May 10, periods 1-4

1. Objectives:
- Master the common learning rules of basic neural networks
- Master the training process of an artificial neural network

2. Content:
- Background knowledge: the common learning rules of basic neural networks (perceptron, feedforward networks)
- Environment: Windows XP, Visual Studio
- Main task: design and implementation of artificial neural network programs

3. Requirements:
- Finish debugging the neural network learning programs and demonstrate the execution results in class
- Print the weight values as they are adjusted during training, analyse the result data, and draw the resulting network
- Submit the experiment report

4. Preparation: the perceptron learning algorithm

(1) Initialization: assign random values to the weight vector; set the iteration counter t = 0.
(2) Weight correction: for each input sample x_k with desired output d_k, do the following.
    a. Compute the network output y = f(S), where S = Σ_i w_i x_i and f is the activation function.
    b. Compute the error between the desired output d_k and the actual output y: e_k = d_k - y.
    c. If e_k is zero, the current sample is already classified correctly and the weights are left unchanged; otherwise update
           w(t+1) = w(t) + η e_k x_k,   t = t + 1,
       where 0 < η <= 1 is the learning rate.
(3) Repeat step (2) over all input samples until every sample is classified correctly.

5. Procedure:

Program 1: a single-layer perceptron trained with the rule above to compute the logical OR of two bipolar (+1/-1) inputs.

#include "stdafx.h"
#include <stdio.h>

#define MAX_ITERATIONS 1000
#define INPUT_NEURONS  2
#define NUM_WEIGHTS    (INPUT_NEURONS + 1)
#define ALPHA          (double)0.2

double weights[NUM_WEIGHTS];

typedef struct {
  double a;
  double b;
  double expected;
} training_data_t;

#define MAX_TESTS 4

training_data_t training_set[MAX_TESTS] = {
  { -1.0, -1.0, -1.0 },
  { -1.0,  1.0,  1.0 },
  {  1.0, -1.0,  1.0 },
  {  1.0,  1.0,  1.0 }
};

double compute( int test )
{
  double result;

  /* Equation 10.2 */
  result = ( (training_set[test].a * weights[0]) +
             (training_set[test].b * weights[1]) +
             (1.0 * weights[2]) );

  if (result > 0.0) result = 1.0;
  else result = -1.0;

  return result;
}

int main()
{
  int i, test;
  double output;
  int change;

  /* Initialize the weights for the perceptron */
  for ( i = 0 ; i < NUM_WEIGHTS ; i++ ) weights[i] = 0.0;

  /* Train the perceptron with the training set */
  change = 1;
  while (change) {

    change = 0;

    for ( test = 0 ; test < MAX_TESTS ; test++ ) {

      /* Run the perceptron on this sample */
      output = compute( test );

      /* Perceptron Learning Algorithm */
      if ( (int)training_set[test].expected != (int)output ) {

        /* Use Equation 10.3 */
        weights[0] += ALPHA * training_set[test].expected * training_set[test].a;
        weights[1] += ALPHA * training_set[test].expected * training_set[test].b;
        weights[2] += ALPHA * training_set[test].expected;

        change = 1;
      }
    }
  }

  /* Check the status of the Perceptron */
  for (i = 0 ; i < MAX_TESTS ; i++) {
    printf( "%g OR %g = %g\n",
            training_set[i].a, training_set[i].b, compute(i) );
  }

  return 0;
}
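Section 3 asks for the weight values to be printed as they are adjusted, but Program 1 only prints the final truth table. The following is a minimal sketch of one way to produce that trace: it repeats the same training loop on the same bipolar OR data and adds a printf after every update. The epoch and sample counters are illustrative additions of this sketch, not part of Program 1.

#include <stdio.h>

#define ALPHA 0.2

int main( void )
{
  /* Same bipolar OR training set as Program 1: {a, b, expected} */
  double set[4][3] = { { -1.0, -1.0, -1.0 },
                       { -1.0,  1.0,  1.0 },
                       {  1.0, -1.0,  1.0 },
                       {  1.0,  1.0,  1.0 } };
  double w[3] = { 0.0, 0.0, 0.0 };          /* w[2] is the bias weight */
  int epoch = 0, change = 1, k;

  while (change) {
    change = 0;
    for (k = 0 ; k < 4 ; k++) {
      double sum = set[k][0] * w[0] + set[k][1] * w[1] + 1.0 * w[2];
      double y = (sum > 0.0) ? 1.0 : -1.0;
      if (y != set[k][2]) {
        /* Perceptron rule: w <- w + ALPHA * expected * input */
        w[0] += ALPHA * set[k][2] * set[k][0];
        w[1] += ALPHA * set[k][2] * set[k][1];
        w[2] += ALPHA * set[k][2];
        change = 1;
        printf( "epoch %d, sample %d: w0=%g w1=%g w2=%g\n",
                epoch, k, w[0], w[1], w[2] );
      }
    }
    epoch++;
  }
  return 0;
}

With zero initial weights and ALPHA = 0.2 the trace is short, since OR is linearly separable and the perceptron converges after a few epochs.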
Program 2: a three-layer feedforward network (35 inputs, 10 hidden units, 10 outputs) trained with backpropagation to recognise the digits 0-9 from 5x7 pixel images.

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "maths.c"
#include "rand.h"

#define INPUT_NEURONS  35
#define HIDDEN_NEURONS 10
#define OUTPUT_NEURONS 10

double inputs[INPUT_NEURONS+1];
double hidden[HIDDEN_NEURONS+1];
double outputs[OUTPUT_NEURONS];

#define RHO (double)0.1

double w_h_i[HIDDEN_NEURONS][INPUT_NEURONS+1];
double w_o_h[OUTPUT_NEURONS][HIDDEN_NEURONS+1];

#define RAND_WEIGHT ( ((double)rand() / (double)RAND_MAX) - 0.5 )

#define IMAGE_SIZE 35

typedef struct test_images_s {
  int image[IMAGE_SIZE];
  int output[OUTPUT_NEURONS];
} test_image_t;

#define MAX_TESTS 10

test_image_t tests[MAX_TESTS] = {

  { { 0,1,1,1,0,    // 0
      1,0,0,0,1,
      1,0,0,0,1,
      1,0,0,0,1,
      1,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0 },
    { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 } },

  { { 0,0,1,0,0,    // 1
      0,1,1,0,0,
      0,0,1,0,0,
      0,0,1,0,0,
      0,0,1,0,0,
      0,0,1,0,0,
      0,1,1,1,0 },
    { 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 } },

  { { 0,1,1,1,0,    // 2
      1,0,0,0,1,
      0,0,0,0,1,
      0,0,1,1,0,
      0,1,0,0,0,
      1,0,0,0,0,
      1,1,1,1,1 },
    { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 } },

  { { 0,1,1,1,0,    // 3
      1,0,0,0,1,
      0,0,0,0,1,
      0,0,1,1,0,
      0,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0 },
    { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 } },

  { { 0,0,0,1,0,    // 4
      0,0,1,1,0,
      0,1,0,1,0,
      1,1,1,1,1,
      0,0,0,1,0,
      0,0,0,1,0,
      0,0,0,1,0 },
    { 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 } },

  { { 1,1,1,1,1,    // 5
      1,0,0,0,0,
      1,0,0,0,0,
      1,1,1,1,0,
      0,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0 },
    { 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 } },

  { { 0,1,1,1,0,    // 6
      1,0,0,0,0,
      1,0,0,0,0,
      1,1,1,1,0,
      1,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0 },
    { 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 } },

  { { 1,1,1,1,1,    // 7
      1,0,0,0,1,
      0,0,0,0,1,
      0,0,0,1,0,
      0,0,1,0,0,
      0,1,0,0,0,
      0,1,0,0,0 },
    { 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 } },

  { { 0,1,1,1,0,    // 8
      1,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0,
      1,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,0 },
    { 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 } },

  { { 0,1,1,1,0,    // 9
      1,0,0,0,1,
      1,0,0,0,1,
      0,1,1,1,1,
      0,0,0,0,1,
      0,0,0,1,0,
      0,1,1,0,0 },
    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 } }
};

void init_network( void )
{
  int i, j;

  /* Set the input bias */
  inputs[INPUT_NEURONS] = 1.0;

  /* Set the hidden bias */
  hidden[HIDDEN_NEURONS] = 1.0;

  /* Initialize the input->hidden weights */
  for (j = 0 ; j < HIDDEN_NEURONS ; j++) {
    for (i = 0 ; i < INPUT_NEURONS+1 ; i++) {
      w_h_i[j][i] = RAND_WEIGHT;
    }
  }

  /* Initialize the hidden->output weights */
  for (j = 0 ; j < OUTPUT_NEURONS ; j++) {
    for (i = 0 ; i < HIDDEN_NEURONS+1 ; i++) {
      w_o_h[j][i] = RAND_WEIGHT;
    }
  }

  return;
}

void feed_forward( void )
{
  int i, j;

  /* Calculate outputs of the hidden layer */
  for (i = 0 ; i < HIDDEN_NEURONS ; i++) {
    hidden[i] = 0.0;
    for (j = 0 ; j < INPUT_NEURONS+1 ; j++) {
      hidden[i] += (w_h_i[i][j] * inputs[j]);
    }
    hidden[i] = sigmoid( hidden[i] );
  }

  /* Calculate outputs for the output layer */
  for (i = 0 ; i < OUTPUT_NEURONS ; i++) {
    outputs[i] = 0.0;
    for (j = 0 ; j < HIDDEN_NEURONS+1 ; j++) {
      outputs[i] += (w_o_h[i][j] * hidden[j]);
    }
    outputs[i] = sigmoid( outputs[i] );
  }
}

void backpropagate_error( int test )
{
  int out, hid, inp;
  double err_out[OUTPUT_NEURONS];
  double err_hid[HIDDEN_NEURONS];

  /* Compute the error for the output nodes (Equation 10.6) */
  for (out = 0 ; out < OUTPUT_NEURONS ; out++) {
    err_out[out] = ((double)tests[test].output[out] - outputs[out]) *
                     sigmoid_d( outputs[out] );
  }

  /* Compute the error for the hidden nodes (Equation 10.7) */
  for (hid = 0 ; hid < HIDDEN_NEURONS ; hid++) {
    err_hid[hid] = 0.0;
    /* Include the error contribution of all output nodes */
    for (out = 0 ; out < OUTPUT_NEURONS ; out++) {
      err_hid[hid] += err_out[out] * w_o_h[out][hid];
    }
    err_hid[hid] *= sigmoid_d( hidden[hid] );
  }

  /* Adjust the weights from the hidden to output layer (Equation 10.9) */
  for (out = 0 ; out < OUTPUT_NEURONS ; out++) {
    for (hid = 0 ; hid < HIDDEN_NEURONS ; hid++) {
      w_o_h[out][hid] += RHO * err_out[out] * hidden[hid];
    }
  }

  /* Adjust the weights from the input to hidden layer (Equation 10.9) */
  for (hid = 0 ; hid < HIDDEN_NEURONS ; hid++) {
    for (inp = 0 ; inp < INPUT_NEURONS+1 ; inp++) {
      w_h_i[hid][inp] += RHO * err_hid[hid] * inputs[inp];
    }
  }

  return;
}

double calculate_mse( int test )
{
  double mse = 0.0;
  int i;

  for (i = 0 ; i < OUTPUT_NEURONS ; i++) {
    mse += sqr( (tests[test].output[i] - outputs[i]) );
  }

  return ( mse / (double)i );
}

void set_network_inputs( int test, double noise_prob )
{
  int i;

  /* Fill the network input vector from the test image */
  for (i = 0 ; i < INPUT_NEURONS ; i++) {
    inputs[i] = tests[test].image[i];

    /* With probability noise_prob, flip the cell */
    if (RANDOM() < noise_prob) {
      inputs[i] = (inputs[i]) ? 0 : 1;
    }
  }

  return;
}

int classifier( void )
{
  int i, best;
  double max;

  best = 0;
  max = outputs[0];

  /* Pick the output node with the largest activation */
  for (i = 1 ; i < OUTPUT_NEURONS ; i++) {
    if (outputs[i] > max) {
      max = outputs[i];
      best = i;
    }
  }

  return best;
}

int main( void )
{
  double mse, noise_prob;
  int test, i, j;

  RANDINIT();

  init_network();

  /* Train the network with backpropagation */
  do {

    /* Pick a test at random */
    test = RANDMAX( MAX_TESTS );

    /* Grab the input image (with no noise) */
    set_network_inputs( test, 0.0 );

    /* Feed this data set forward */
    feed_forward();

    /* Backpropagate the error */
    backpropagate_error( test );

    /* Calculate the current MSE */
    mse = calculate_mse( test );

  } while (mse > 0.001);
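The listing of Program 2 is cut off at this point in the pages included here. It also relies on helper routines from maths.c and rand.h that the report does not reproduce: sigmoid, sigmoid_d, sqr and the random-number macros RANDINIT, RANDOM and RANDMAX. The following is only a sketch of what those helpers are assumed to provide, using the standard logistic function, its derivative written in terms of a unit's output, and thin wrappers around rand(); the actual maths.c and rand.h used in the lab may differ.

/* Assumed contents of maths.c / rand.h -- a sketch, not the files used in the lab */
#include <stdlib.h>
#include <math.h>
#include <time.h>

/* Logistic activation function */
double sigmoid( double x )
{
  return ( 1.0 / (1.0 + exp( -x )) );
}

/* Derivative of the logistic function, written in terms of the unit's
 * output o = sigmoid(x), which is what backpropagate_error() passes in */
double sigmoid_d( double o )
{
  return ( o * (1.0 - o) );
}

/* Square of a value, used by calculate_mse() */
double sqr( double x )
{
  return ( x * x );
}

/* Random-number helpers used by main() and set_network_inputs() */
#define RANDINIT()  srand( (unsigned)time( NULL ) )        /* seed the generator  */
#define RANDOM()    ((double)rand() / (double)RAND_MAX)    /* uniform in [0, 1]   */
#define RANDMAX(x)  (rand() % (x))                         /* integer in [0, x-1] */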
