A Simple Neural Network in Java (Based on the Sigmoid Activation Function)
Main code
NeuronNetwork.java
package com.rockbb.math.nnetwork;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class NeuronNetwork {
    private List<NeuronLayer> layers;

    public NeuronNetwork(int[] sizes, double bpFactor, Activator activator) {
        layers = new ArrayList<>(sizes.length - 1);
        int inputSize = sizes[0];
        for (int i = 1; i < sizes.length; i++) {
            NeuronLayer layer = new NeuronLayer(inputSize, sizes[i], activator, bpFactor);
            layers.add(layer);
            inputSize = sizes[i];
        }
        for (int i = 0; i < layers.size() - 1; i++) {
            layers.get(i).setNext(layers.get(i + 1));
        }
    }

    public List<NeuronLayer> getLayers() {return layers;}
    public void setLayers(List<NeuronLayer> layers) {this.layers = layers;}

    public double getError() {
        return layers.get(layers.size() - 1).getError();
    }

    public List<Double> predict(List<Double> inputs) {
        List<Double> middle = inputs;
        for (int i = 0; i < layers.size(); i++) {
            middle = layers.get(i).forward(middle);
        }
        return middle;
    }

    public void backward() {
        for (int j = layers.size() - 1; j >= 0; j--) {
            layers.get(j).backward();
        }
    }

    public void fillTargets(List<Double> targets) {
        layers.get(layers.size() - 1).fillTargets(targets);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int j = 0; j < layers.size(); j++) {
            sb.append(layers.get(j).toString());
        }
        return sb.toString();
    }

    public static String listToString(List<Double> list) {
        StringBuilder sb = new StringBuilder();
        for (Double t : list) {
            sb.append(String.format("% 10.8f ", t));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        int[] sz = new int[]{2, 4, 1};
        double[][] trainData = {{0d, 0d}, {0d, 1d}, {1d, 0d}, {1d, 1d}};
        double[][] targetData = {{0d}, {1d}, {1d}, {0d}};

        NeuronNetwork nn = new NeuronNetwork(sz, 0.5d, new SigmoidActivator());
        for (int kk = 0; kk < 20000; kk++) {
            double totalError = 0d;
            for (int i = 0; i < trainData.length; i++) {
                List<Double> inputs = Arrays.asList(trainData[i][0], trainData[i][1]);
                List<Double> targets = Arrays.asList(targetData[i][0]);
                nn.fillTargets(targets);
                nn.predict(inputs);
                //System.out.print(nn);
                System.out.println(String.format("kk:%5d, i:%d, error: %.8f\n", kk, i, nn.getError()));
                totalError += nn.getError();
                nn.backward();
            }
            System.out.println(String.format("kk:%5d, Total Error: %.8f\n\n", kk, totalError));
            if (totalError < 0.0001) {
                System.out.println(nn);
                break;
            }
        }
        System.out.println(nn);
    }
}
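Once training has converged, a few lines like the following could be appended to the end of main() to print the learned XOR table. This is a sketch, not part of the original code; it only uses the trained nn variable and the listToString() helper defined above.

// Sketch: print the network's prediction for each XOR input pair.
double[][] xorInputs = {{0d, 0d}, {0d, 1d}, {1d, 0d}, {1d, 1d}};
for (double[] row : xorInputs) {
    List<Double> out = nn.predict(Arrays.asList(row[0], row[1]));
    System.out.println(String.format("%.0f XOR %.0f -> %s",
            row[0], row[1], listToString(out)));
}

With the 0.0001 total-error stopping threshold, each printed value typically lands within a few hundredths of its 0/1 target.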
NeuronLayer.java
package com.rockbb.math.nnetwork;

import java.util.ArrayList;
import java.util.List;

public class NeuronLayer {
    private int inputSize;
    private List<Neuron> neurons;
    private double bias;
    private Activator activator;
    private NeuronLayer next;
    private double bpFactor;
    private List<Double> inputs;

    public NeuronLayer(int inputSize, int size, Activator activator, double bpFactor) {
        this.inputSize = inputSize;
        this.activator = activator;
        this.bpFactor = bpFactor;
        this.bias = Math.random() - 0.5;
        this.neurons = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            Neuron neuron = new Neuron(this, inputSize);
            neurons.add(neuron);
        }
    }

    public int getInputSize() {return inputSize;}
    public void setInputSize(int inputSize) {this.inputSize = inputSize;}
    public List<Neuron> getNeurons() {return neurons;}
    public void setNeurons(List<Neuron> neurons) {this.neurons = neurons;}
    public double getBias() {return bias;}
    public void setBias(double bias) {this.bias = bias;}
    public Activator getActivator() {return activator;}
    public void setActivator(Activator activator) {this.activator = activator;}
    public NeuronLayer getNext() {return next;}
    public void setNext(NeuronLayer next) {this.next = next;}

    public List<Double> forward(List<Double> inputs) {
        this.inputs = inputs;
        List<Double> outputs = new ArrayList<>(neurons.size());
        for (int i = 0; i < neurons.size(); i++) {
            outputs.add(neurons.get(i).forward(inputs));
        }
        return outputs;
    }

    public void backward() {
        if (this.next == null) {
            // Output layer: delta = -(target - output) * f'(output)
            double totalDelta = 0d;
            for (int i = 0; i < neurons.size(); i++) {
                Neuron n = neurons.get(i);
                double delta = -(n.getTarget() - n.getOutput()) * activator.backwardDelta(n.getOutput());
                n.setBpDelta(delta);
                totalDelta += delta;
                // Apply the update to each weight of this neuron
                for (int j = 0; j < n.getWeights().size(); j++) {
                    n.getWeights().set(j, n.getWeights().get(j) - bpFactor * delta * inputs.get(j));
                }
            }
            // Apply the averaged update to the bias
            this.bias = this.bias - bpFactor * totalDelta / neurons.size();
        } else {
            // Hidden layer: delta = sum(downstream delta * connecting weight) * f'(output)
            double totalDelta = 0d;
            for (int i = 0; i < neurons.size(); i++) {
                Neuron n = neurons.get(i);
                List<Neuron> downNeurons = next.getNeurons();
                double delta = 0;
                for (int j = 0; j < downNeurons.size(); j++) {
                    delta += downNeurons.get(j).getBpDelta() * downNeurons.get(j).getWeights().get(i);
                }
                delta = delta * activator.backwardDelta(n.getOutput());
                n.setBpDelta(delta);
                totalDelta += delta;
                // Apply the update to each weight of this neuron
                for (int j = 0; j < n.getWeights().size(); j++) {
                    n.getWeights().set(j, n.getWeights().get(j) - bpFactor * delta * inputs.get(j));
                }
            }
            // Apply the averaged update to the bias
            this.bias = this.bias - bpFactor * totalDelta / neurons.size();
        }
    }

    public double getError() {
        double totalError = 0d;
        for (int i = 0; i < neurons.size(); i++) {
            totalError += Math.pow(neurons.get(i).getError(), 2);
        }
        return totalError / (2 * neurons.size());
    }

    public void fillTargets(List<Double> targets) {
        for (int i = 0; i < neurons.size(); i++) {
            neurons.get(i).setTarget(targets.get(i));
        }
    }

    public double filter(double netInput) {
        return activator.forward(netInput + bias);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("Input size: %d, bias: %.8f\n", inputSize, bias));
        for (int i = 0; i < neurons.size(); i++) {
            sb.append(String.format("%3d: %s\n", i, neurons.get(i).toString()));
        }
        return sb.toString();
    }
}
Neuron.java
package com.rockbb.math.nnetwork;

import java.util.ArrayList;
import java.util.List;

public class Neuron {
    private NeuronLayer layer;
    private List<Double> weights;
    private double output;
    private double target;
    private double bpDelta;

    public Neuron(NeuronLayer layer, int inputSize) {
        this.layer = layer;
        this.weights = new ArrayList<>(inputSize);
        for (int i = 0; i < inputSize; i++) {
            // Initialize each weight with a value in [0.1, 1)
            weights.add(Math.random() * 0.9 + 0.1);
        }
        this.bpDelta = 0d;
    }

    public NeuronLayer getLayer() {return layer;}
    public void setLayer(NeuronLayer layer) {this.layer = layer;}
    public List<Double> getWeights() {return weights;}
    public void setWeights(List<Double> weights) {this.weights = weights;}
    public double getOutput() {return output;}
    public void setOutput(double output) {this.output = output;}
    public double getTarget() {return target;}
    public void setTarget(double target) {this.target = target;}
    public double getBpDelta() {return bpDelta;}
    public void setBpDelta(double bpDelta) {this.bpDelta = bpDelta;}

    public double calcNetInput(List<Double> inputs) {
        double netInput = 0d;
        for (int i = 0; i < weights.size(); i++) {
            netInput += inputs.get(i) * weights.get(i);
        }
        return netInput;
    }

    public double forward(List<Double> inputs) {
        double netInput = calcNetInput(inputs);
        this.output = layer.filter(netInput);
        return this.output;
    }

    public double getError() {
        return target - output;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("O:% 10.8f T:% 10.8f D:% 10.8f w:{", output, target, bpDelta));
        for (int i = 0; i < weights.size(); i++) {
            sb.append(String.format("% 10.8f ", weights.get(i)));
        }
        sb.append('}');
        return sb.toString();
    }
}
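With the three classes in place, one way to sanity-check the hand-written backward() pass is a finite-difference gradient check: nudge a single weight up and down, re-run the forward pass, and estimate dE/dw numerically. The helper below is a sketch, not part of the original code; it could be added as a static method on NeuronNetwork (so the List import is available) and uses only methods defined above.

// Sketch: central-difference estimate of dE/dw for one weight.
public static double numericGradient(NeuronNetwork nn, NeuronLayer layer,
                                     int neuronIdx, int weightIdx,
                                     List<Double> inputs, List<Double> targets) {
    final double eps = 1e-6;
    List<Double> w = layer.getNeurons().get(neuronIdx).getWeights();
    double original = w.get(weightIdx);
    nn.fillTargets(targets);

    w.set(weightIdx, original + eps);
    nn.predict(inputs);
    double errPlus = nn.getError();

    w.set(weightIdx, original - eps);
    nn.predict(inputs);
    double errMinus = nn.getError();

    w.set(weightIdx, original); // restore the weight
    return (errPlus - errMinus) / (2 * eps);
}

To compare against the analytic value, run this helper first, then call predict() followed by a single backward(), and read getBpDelta() times the corresponding input for the same weight. For the 2-4-1 XOR network in main(), whose output layer has a single neuron, the two numbers should agree closely for both output-layer and hidden-layer weights, since getError() averages over that one output neuron only.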
Activation functions
Activator.java
package com.rockbb.math.nnetwork;

public interface Activator {
    // Maps the biased net input to the neuron's output
    double forward(double input);
    // Derivative of the activation, expressed in terms of the neuron's
    // output rather than its net input
    double backwardDelta(double output);
}
SigmoidActivator.java
package com.rockbb.math.nnetwork;

public class SigmoidActivator implements Activator {
    public double forward(double input) {
        return 1 / (1 + Math.exp(-input));
    }

    // Sigmoid derivative in terms of the output: f'(x) = y * (1 - y)
    public double backwardDelta(double output) {
        return output * (1 - output);
    }
}
Given the same training data and error target, this network needs fewer training iterations than http://www.emergentmind.com/neural-network.
It works correctly with the Sigmoid activation function.
With the ReLU activation function, some neuron always ends up frozen and the network fails to converge; this is still to be investigated (a common remedy is sketched below).
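For reference, the sketch below shows a leaky-ReLU activator under the same Activator contract, where backwardDelta() receives the neuron's output rather than its net input. This class is an assumption for illustration, not code from the original post. With a plain ReLU, the derivative seen through this contract would be output > 0 ? 1 : 0, so once a neuron's net input goes negative its gradient is exactly zero and its weights never update again (the "dying ReLU" problem), which matches the frozen-neuron behaviour described above; a small negative-side slope keeps a gradient flowing.

package com.rockbb.math.nnetwork;

// Sketch, not part of the original code: a leaky ReLU that stays
// compatible with backwardDelta(output), since the sign of the output
// determines which branch of the function produced it.
public class LeakyReLUActivator implements Activator {
    private static final double ALPHA = 0.01; // negative-side slope (assumed value)

    public double forward(double input) {
        return input > 0 ? input : ALPHA * input;
    }

    public double backwardDelta(double output) {
        return output > 0 ? 1d : ALPHA;
    }
}

Even with a leaky variant, convergence on XOR remains sensitive to the learning rate and to the all-positive [0.1, 1) weight initialization used here, so this sketch addresses the freezing itself rather than guaranteeing convergence.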