# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# BP-ANN.py
# Created on: 2014-06-12 09:49:56.00000
# Description: three-layer back-propagation (BP) neural network trained on
#              60 pre-normalised samples of 4 features each (iris-style data,
#              all values already scaled into [0, 1]).
# ---------------------------------------------------------------------------
import os
import math
import time
import datetime

# Network topology.
n = 4            # input-layer neurons (one per feature)
p = 6            # hidden-layer neurons
q = 1            # output-layer neurons

# Number of training samples.
trainsample = 60

# Training inputs: trainsample rows x n features.
x = [
    [0.27500, 0.60000, 0.14085, 0.07143],
    [0.22500, 0.43333, 0.14085, 0.07143],
    [0.17500, 0.50000, 0.12676, 0.07143],
    [0.15000, 0.46667, 0.15493, 0.07143],
    [0.25000, 0.63333, 0.14085, 0.07143],
    [0.35000, 0.73333, 0.18310, 0.14286],
    [0.15000, 0.56667, 0.14085, 0.10714],
    [0.25000, 0.56667, 0.15493, 0.07143],
    [0.10000, 0.40000, 0.14085, 0.07143],
    [0.22500, 0.46667, 0.15493, 0.03571],
    [0.35000, 0.66667, 0.15493, 0.07143],
    [0.20000, 0.56667, 0.16901, 0.07143],
    [0.20000, 0.43333, 0.14085, 0.03571],
    [0.07500, 0.43333, 0.09859, 0.03571],
    [0.45000, 0.76667, 0.11268, 0.07143],
    [0.42500, 0.90000, 0.15493, 0.14286],
    [0.35000, 0.73333, 0.12676, 0.14286],
    [0.27500, 0.60000, 0.14085, 0.10714],
    [0.42500, 0.70000, 0.18310, 0.10714],
    [0.27500, 0.70000, 0.15493, 0.10714],
    [0.75000, 0.50000, 0.60563, 0.50000],
    [0.60000, 0.50000, 0.57746, 0.53571],
    [0.72500, 0.46667, 0.63380, 0.53571],
    [0.37500, 0.20000, 0.50704, 0.46429],
    [0.62500, 0.36667, 0.59155, 0.53571],
    [0.42500, 0.36667, 0.57746, 0.46429],
    [0.57500, 0.53333, 0.60563, 0.57143],
    [0.22500, 0.23333, 0.40845, 0.35714],
    [0.65000, 0.40000, 0.59155, 0.46429],
    [0.30000, 0.33333, 0.49296, 0.50000],
    [0.25000, 0.10000, 0.43662, 0.35714],
    [0.47500, 0.43333, 0.53521, 0.53571],
    [0.50000, 0.16667, 0.50704, 0.35714],
    [0.52500, 0.40000, 0.60563, 0.50000],
    [0.40000, 0.40000, 0.45070, 0.46429],
    [0.67500, 0.46667, 0.56338, 0.50000],
    [0.40000, 0.43333, 0.57746, 0.53571],
    [0.45000, 0.33333, 0.52113, 0.35714],
    [0.55000, 0.16667, 0.57746, 0.53571],
    [0.40000, 0.26667, 0.49296, 0.39286],
    [0.57500, 0.53333, 0.78873, 0.89286],
    [0.45000, 0.33333, 0.66197, 0.67857],
    [0.77500, 0.43333, 0.77465, 0.75000],
    [0.57500, 0.40000, 0.73239, 0.64286],
    [0.62500, 0.43333, 0.76056, 0.78571],
    [0.90000, 0.43333, 0.87324, 0.75000],
    [0.22500, 0.26667, 0.57746, 0.60714],
    [0.82500, 0.40000, 0.83099, 0.64286],
    [0.67500, 0.26667, 0.76056, 0.64286],
    [0.80000, 0.63333, 0.80282, 0.89286],
    [0.62500, 0.50000, 0.66197, 0.71429],
    [0.60000, 0.33333, 0.69014, 0.67857],
    [0.70000, 0.43333, 0.71831, 0.75000],
    [0.42500, 0.26667, 0.64789, 0.71429],
    [0.45000, 0.36667, 0.66197, 0.85714],
    [0.60000, 0.50000, 0.69014, 0.82143],
    [0.62500, 0.43333, 0.71831, 0.64286],
    [0.92500, 0.70000, 0.88732, 0.78571],
    [0.92500, 0.30000, 0.91549, 0.82143],
    [0.50000, 0.16667, 0.64789, 0.53571],
]

# Desired output per sample: 0.2 / 0.5 / 0.7 encode the three sample classes
# (20 samples each, in order).
d = [0.2] * 20 + [0.5] * 20 + [0.7] * 20

# Hidden layer (p neurons; each has one weight per input neuron).
hi = [0 for i in range(p)]      # net input of each hidden neuron
ho = [0 for i in range(p)]      # activation (output) of each hidden neuron
b1 = [0.3 for i in range(p)]    # hidden-layer biases
Whi = [[0.5 for i in range(n)] for j in range(p)]  # input->hidden weights
pp = [0 for i in range(p)]      # hidden-layer error term (delta)
rateWhi = 0.05                  # learning rate for input->hidden weights

# Output layer (q neurons; each has one weight per hidden neuron).
yi = [0 for i in range(q)]      # net input of each output neuron
yo = [1 for i in range(q)]      # activation (output) of each output neuron
b2 = [0.3 for i in range(q)]    # output-layer biases
Woh = [[0.5 for i in range(p)] for j in range(q)]  # hidden->output weights
qq = [0 for i in range(q)]      # output-layer error term (delta)
rateWoh = 0.05                  # learning rate for hidden->output weights

# Error function
# Accumulated squared error; initialised above the stop threshold so the
# training loop below runs at least once.
e = 1.0


def f1(x):
    """Sigmoid activation function: 1 / (1 + exp(-x))."""
    y = 1.0 / (1.0 + math.exp(-1.0 * x))
    return y


def f2(x):
    """Derivative of the sigmoid, f1(x) * (1 - f1(x))."""
    y = f1(x)
    y = y * (1.0 - y)
    return y


def train():
    """Run one epoch of online (per-sample) back-propagation.

    Updates the global weights (Whi, Woh) and biases (b1, b2) in place
    after each sample, and returns the accumulated squared error
    sum(0.5 * (d - yo)^2) over the whole epoch.
    """
    e = 0.0
    for trainIndex in range(0, trainsample):
        # Forward pass, hidden layer: net input and activation.
        for i in range(0, p):
            hi[i] = 0
            for j in range(0, n):
                hi[i] = hi[i] + Whi[i][j] * x[trainIndex][j]
            ho[i] = f1(hi[i] + b1[i])
        # Forward pass, output layer: net input and activation.
        for i in range(0, q):
            yi[i] = 0
            for j in range(0, p):
                yi[i] = yi[i] + Woh[i][j] * ho[j]
            yo[i] = f1(yi[i] + b2[i])
        # Output-layer delta and hidden->output weight update.
        for i in range(0, q):
            qq[i] = (d[trainIndex] - yo[i]) * yo[i] * (1 - yo[i])
            for k in range(0, p):
                Woh[i][k] = Woh[i][k] + rateWoh * qq[i] * ho[k]
        # Hidden-layer delta (back-propagated through Woh) and
        # input->hidden weight update.
        # NOTE(review): Woh has already been updated for this sample above,
        # so the deltas use the new weights — kept as in the original.
        for i in range(0, p):
            pp[i] = 0
            for j in range(0, q):
                pp[i] = pp[i] + qq[j] * Woh[j][i]
            pp[i] = pp[i] * ho[i] * (1 - ho[i])
            for k in range(0, n):
                Whi[i][k] = Whi[i][k] + rateWhi * pp[i] * x[trainIndex][k]
        # Accumulate this sample's squared error.
        for i in range(0, q):
            e = e + 0.5 * math.pow((d[trainIndex] - yo[i]), 2)
        # Bias updates use the deltas computed for this sample.
        for i in range(0, q):
            b2[i] = b2[i] + rateWoh * qq[i]
        for i in range(0, p):
            b1[i] = b1[i] + rateWhi * pp[i]
    return e


def recognize(vv):
    """Forward-propagate the feature vector vv and print the network output."""
    # Hidden layer: net input and activation.
    for i in range(0, p):
        hi[i] = 0
        for j in range(0, n):
            hi[i] = hi[i] + Whi[i][j] * vv[j]
        ho[i] = f1(hi[i] + b1[i])
    # Output layer: net input and activation.
    for i in range(0, q):
        yi[i] = 0
        for j in range(0, p):
            yi[i] = yi[i] + Woh[i][j] * ho[j]
        yo[i] = f1(yi[i] + b2[i])
    print('-------------------------------------------------------')
    print(yo)


if __name__ == "__main__":
    # Train until the epoch error drops below the threshold, capped at
    # ~10000 epochs, then classify three unseen samples.
    times = 0
    while e > 0.001:
        e = train()
        times = times + 1
        print(str(times) + " " + str(e))
        if times > 10000:
            break
    recognize([0.7, 0.5, 0.774647887, 0.821428571])
    recognize([0.4, 0.433333333, 0.521126761, 0.464285714])
    recognize([0.25, 0.5, 0.112676056, 0.071428571])
BP神经网络实现的更多相关文章
- BP神经网络原理及python实现
[废话外传]:终于要讲神经网络了,这个让我踏进机器学习大门,让我读研,改变我人生命运的四个字!话说那么一天,我在乱点百度,看到了这样的内容: 看到这么高大上,这么牛逼的定义,怎么能不让我这个技术宅男心 ...
- BP神经网络
秋招刚结束，这俩月没事就学习下斯坦福大学公开课，想学习一下深度学习(这年头不会DL，都不敢说自己懂机器学习)，目前学到了神经网络部分，学习起来有点吃力，把之前学的BP(back-propagation ...
- 数据挖掘系列(9)——BP神经网络算法与实践
神经网络曾经很火,有过一段低迷期,现在因为深度学习的原因继续火起来了.神经网络有很多种:前向传输网络.反向传输网络.递归神经网络.卷积神经网络等.本文介绍基本的反向传输神经网络(Backpropaga ...
- BP神经网络推导过程详解
BP算法是一种最有效的多层神经网络学习方法,其主要特点是信号前向传递,而误差后向传播,通过不断调节网络权重值,使得网络的最终输出与期望输出尽可能接近,以达到训练的目的. 一.多层神经网络结构及其描述 ...
- 极简反传(BP)神经网络
一.两层神经网络(感知机) import numpy as np '''极简两层反传(BP)神经网络''' # 样本 X = np.array([[0,0,1],[0,1,1],[1,0,1],[1, ...
- BP神经网络
BP神经网络基本原理 BP神经网络是一种单向传播的多层前向网络,具有三层或多层以上的神经网络结构,其中包含输入层.隐含层和输出层的三层网络应用最为普遍. 网络中的上下层之间实现全连接,而每层神经元之 ...
- BP神经网络学习笔记_附源代码
BP神经网络基本原理: 误差逆传播(back propagation, BP)算法是一种计算单个权值变化引起网络性能变化的较为简单的方法.由于BP算法过程包含从输出节点开始,反向地向第一隐含层(即最接 ...
- 机器学习(一):梯度下降、神经网络、BP神经网络
这几天围绕论文A Neural Probability Language Model 看了一些周边资料,如神经网络.梯度下降算法,然后顺便又延伸温习了一下线性代数.概率论以及求导.总的来说,学到不少知 ...
- 基于Storm 分布式BP神经网络,将神经网络做成实时分布式架构
将神经网络做成实时分布式架构: Storm 分布式BP神经网络: http://bbs.csdn.net/topics/390717623 流式大数据处理的三种框架:Storm,Spark和Sa ...
- BP神经网络算法学习
BP(Back Propagation)网络是1986年由Rumelhart和McCelland为首的科学家小组提出,是一种按误差逆传播算法训练的多层前馈网络,是眼下应用最广泛的神经网络模型之中的一个 ...
随机推荐
- Deep learning:四十六(DropConnect简单理解)
和maxout(maxout简单理解)一样,DropConnect也是在ICML2013上发表的,同样也是为了提高Deep Network的泛化能力的,两者都号称是对Dropout(Dropout简单 ...
- Codrops 教程:基于 CSS3 的精美模态窗口效果
Codrops 分享了漂亮的模态窗口效果实现方法,希望给前端开发人员提供一些创新显示对话框的启示.这个方案使用了触发按钮(或任何的 HTML 元素),在点击的时候出现一个模态窗口,带有简单的过渡(或动 ...
- js cookie存储方法
/*! * jQuery Cookie Plugin v1.4.0 * https://github.com/carhartl/jquery-cookie * * Copyright 2013 Kla ...
- installshield生成时提示6003错误的一种可能
因为项目需要,2014年写过的一个老项目需要重新打包.开发电脑换了,原来开发的系统是win7,现在已经升到了win10.而且原来使用installshield limited 2013开发的打包项目已 ...
- Swift的期待
去年底苹果开源 Swift 之后,Google.Facebook和Uber三个互联网巨头就曾在伦敦召开会议讨论Swift在各自开发战略中的地位.近日业界有消息传出,谷歌有意考虑将Swift作为Andr ...
- 浅谈你感兴趣的 C# GC 机制底层
本文内容是学习CLR.via C#的21章后个人整理,有不足之处欢迎指导. 昨天是1024,coder的节日,我为自己coder之路定下一句准则--保持学习,保持自信,保持谦逊,保持分享,越走越远. ...
- 关于ExtJs4的Grid带 查询 参数 分页(baseParams-->extraParams)
(园里很多文章,美名其曰 :ExtJs GridPanel+查询条件+分页. 但是加查询条件后点击下一页,查询条件失效,求你们自己测试明白再显摆 不要误导我这种新人.) ExtJs6发布了,ExtJ ...
- Mssql中一些常用数据类型的说明和区别
Mssql中一些常用数据类型的说明和区别 1.bigint 占用8个字节的存储空间,取值范围在-2^63 (-9,223,372,036,854,775,808) 到 2^63-1 (9,223,37 ...
- 深入Java事务的原理与应用
一.什么是JAVA事务 通常的观念认为,事务仅与数据库相关. 事务必须服从ISO/IEC所制定的ACID原则.ACID是原子性(atomicity).一致性(consistency).隔离性 ( ...
- ASP.NET MVC 网站开发总结(六)——简谈Json的序列化与反序列化
首先,先简单的谈一下什么是序列化与反序列化,序列化 (Serialization)将对象的状态信息转换为可以存储或传输的形式的过程.在序列化期间,对象将其当前状态写入到临时或持久性存储区.以后,可以通 ...