Implementing a Radial Basis Function (RBF) Neural Network in Python
from numpy import array, append, vstack, transpose, reshape, \
    dot, true_divide, mean, exp, sqrt, log, \
    loadtxt, savetxt, zeros, frombuffer
from numpy.linalg import norm, lstsq
from multiprocessing import Process, Array
from random import sample
from time import time
from sys import stdout
from ctypes import c_double
from h5py import File


def metrics(a, b):
    return norm(a - b)


def gaussian(x, mu, sigma):
    return exp(-metrics(mu, x)**2 / (2 * sigma**2))


def multiQuadric(x, mu, sigma):
    return pow(metrics(mu, x)**2 + sigma**2, 0.5)


def invMultiQuadric(x, mu, sigma):
    return pow(metrics(mu, x)**2 + sigma**2, -0.5)


def plateSpine(x, mu):
    r = metrics(mu, x)
    return (r**2) * log(r)
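
# Quick sanity check of the kernels above (illustrative values, not from the
# original post): with a = array([0.0, 0.0]) and b = array([3.0, 4.0]),
#   metrics(a, b)              -> 5.0            (Euclidean distance)
#   gaussian(a, b, 2.0)        -> exp(-25 / 8)   ~ 0.0439
#   multiQuadric(a, b, 2.0)    -> sqrt(25 + 4)   ~ 5.385
#   invMultiQuadric(a, b, 2.0) -> 1 / sqrt(29)   ~ 0.186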

class Rbf:
    def __init__(self, prefix='rbf', workers=4, extra_neurons=0, from_files=None):
        self.prefix = prefix
        self.workers = workers
        self.extra_neurons = extra_neurons

        # Import partial model
        if from_files is not None:
            w_handle = self.w_handle = File(from_files['w'], 'r')
            mu_handle = self.mu_handle = File(from_files['mu'], 'r')
            sigma_handle = self.sigma_handle = File(from_files['sigma'], 'r')

            self.w = w_handle['w']
            self.mu = mu_handle['mu']
            self.sigmas = sigma_handle['sigmas']

            self.neurons = self.sigmas.shape[0]

    def _calculate_error(self, y):
        self.error = mean(abs(self.os - y))
        self.relative_error = true_divide(self.error, mean(y))

    def _generate_mu(self, x):
        n = self.n
        extra_neurons = self.extra_neurons

        # TODO: Make reusable
        mu_clusters = loadtxt('clusters100.txt', delimiter='\t')

        mu_indices = sample(range(n), extra_neurons)
        mu_new = x[mu_indices, :]
        mu = vstack((mu_clusters, mu_new))

        return mu

    def _calculate_sigmas(self):
        neurons = self.neurons
        mu = self.mu

        sigmas = zeros((neurons, ))
        for i in range(neurons):
            dists = [0 for _ in range(neurons)]
            for j in range(neurons):
                if i != j:
                    dists[j] = metrics(mu[i], mu[j])
            # Width heuristic: sigma_i = 2 * mean of distances from mu_i
            # (the self-distance stays 0 and is included in the mean)
            sigmas[i] = mean(dists) * 2
            # sigmas[i] = max(dists) / sqrt(neurons * 2)
        return sigmas

    def _calculate_phi(self, x):
        C = self.workers
        neurons = self.neurons
        mu = self.mu
        sigmas = self.sigmas
        phi = self.phi = None
        n = self.n

        def heavy_lifting(c, phi):
            s = jobs[c][1] - jobs[c][0]
            for k, i in enumerate(range(jobs[c][0], jobs[c][1])):
                for j in range(neurons):
                    # phi[i, j] = metrics(x[i, :], mu[j])**3
                    # phi[i, j] = plateSpine(x[i, :], mu[j])
                    # phi[i, j] = invMultiQuadric(x[i, :], mu[j], sigmas[j])
                    phi[i, j] = multiQuadric(x[i, :], mu[j], sigmas[j])
                    # phi[i, j] = gaussian(x[i, :], mu[j], sigmas[j])
                if k % 1000 == 0:
                    percent = true_divide(k, s) * 100
                    print(c, ': {:2.2f}%'.format(percent))
            print(c, ': Done')

        # Distribute the work between the workers over a shared array
        shared_array = Array(c_double, n * neurons)
        phi = frombuffer(shared_array.get_obj())
        phi = phi.reshape((n, neurons))

        jobs = []
        workers = []

        p = n // C
        m = n % C
        for c in range(C):
            jobs.append((c * p, (c + 1) * p + (m if c == C - 1 else 0)))
            worker = Process(target=heavy_lifting, args=(c, phi))
            workers.append(worker)
            worker.start()

        for worker in workers:
            worker.join()

        return phi

    def _do_algebra(self, y):
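        # Fit the output layer: lstsq solves the linear least-squares problem
        # min_w ||phi w - y||^2 for the weights w, and dot(w, transpose(phi))
        # recomputes the fitted outputs phi w (with a 1-D y, w is 1-D).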
        phi = self.phi

        w = lstsq(phi, y)[0]
        os = dot(w, transpose(phi))
        return w, os
    def train(self, x, y):
        self.n = x.shape[0]

        ## Initialize HDF5 caches
        prefix = self.prefix
        postfix = str(self.n) + '-' + str(self.extra_neurons) + '.hdf5'
        name_template = prefix + '-{}-' + postfix
        phi_handle = self.phi_handle = File(name_template.format('phi'), 'w')
        os_handle = self.os_handle = File(name_template.format('os'), 'w')
        w_handle = self.w_handle = File(name_template.format('w'), 'w')
        mu_handle = self.mu_handle = File(name_template.format('mu'), 'w')
        sigma_handle = self.sigma_handle = File(name_template.format('sigma'), 'w')

        ## Mu generation
        mu = self.mu = self._generate_mu(x)
        self.neurons = mu.shape[0]
        print('({} neurons)'.format(self.neurons))

        # Save to HDF5
        mu_h5 = mu_handle.create_dataset('mu', data=mu)

        ## Sigma calculation
        print('Calculating Sigma...')
        sigmas = self.sigmas = self._calculate_sigmas()
        # Save to HDF5
        sigmas_h5 = sigma_handle.create_dataset('sigmas', data=sigmas)
        print('Done')

        ## Phi calculation
        print('Calculating Phi...')
        phi = self.phi = self._calculate_phi(x)
        print('Done')

        # Saving to HDF5
        print('Serializing...')
        phi_h5 = phi_handle.create_dataset('phi', data=phi)
        del phi
        self.phi = phi_h5
        print('Done')

        ## Algebra
        print('Doing final algebra...')
        w, os = self.w, self.os = self._do_algebra(y)
        # Saving to HDF5
        w_h5 = w_handle.create_dataset('w', data=w)
        os_h5 = os_handle.create_dataset('os', data=os)

        ## Calculate error
        self._calculate_error(y)
        print('Done')
    def predict(self, test_data):
        # Materialize mu, sigmas and w in memory (works for HDF5 datasets and arrays)
        mu = self.mu = self.mu[()]
        sigmas = self.sigmas = self.sigmas[()]
        w = self.w = self.w[()]

        print('Calculating phi for test data...')
        phi = self._calculate_phi(test_data)
        os = dot(w, transpose(phi))

        savetxt('iok3834.txt', os, delimiter='\n')
        return os

    @property
    def summary(self):
        return '\n'.join(
            ['-----------------',
             'Training set size: {}'.format(self.n),
             'Hidden layer size: {}'.format(self.neurons),
             '-----------------',
             'Absolute error   : {:02.2f}'.format(self.error),
             'Relative error   : {:02.2f}%'.format(self.relative_error * 100)])


def predict(test_data):
    mu = File('rbf-mu-212243-2400.hdf5', 'r')['mu'][()]
    sigmas = File('rbf-sigma-212243-2400.hdf5', 'r')['sigmas'][()]
    w = File('rbf-w-212243-2400.hdf5', 'r')['w'][()]

    n = test_data.shape[0]
    neur = mu.shape[0]

    phi = zeros((n, neur))
    for i in range(n):
        for j in range(neur):
            phi[i, j] = multiQuadric(test_data[i, :], mu[j], sigmas[j])

    os = dot(w, transpose(phi))
    savetxt('iok3834.txt', os, delimiter='\n')
    return os
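
The listing above ships no driver code, so here is a minimal usage sketch. It assumes everything is saved in a single script together with a training matrix, a target vector and the clusters100.txt centers file that _generate_mu loads; the data file names and the extra_neurons value below are placeholders, not part of the original implementation. Note that _calculate_phi starts worker processes around a nested function, which only works with fork-style process creation (the default on Linux); the __main__ guard is still good practice.

if __name__ == '__main__':
    # Placeholder file names; replace with the actual data set.
    x = loadtxt('train_x.txt', delimiter='\t')    # shape (n_samples, n_features)
    y = loadtxt('train_y.txt', delimiter='\t')    # shape (n_samples,)

    model = Rbf(prefix='rbf', workers=4, extra_neurons=100)
    model.train(x, y)       # caches mu, sigmas, phi, w and the outputs as HDF5 files
    print(model.summary)

    # The module-level predict() reads the hard-coded HDF5 file names above, so it
    # only matches a model trained with the corresponding n and extra_neurons.
    test_x = loadtxt('test_x.txt', delimiter='\t')
    print(predict(test_x))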