Machine Learning Foundations Notes: Homework #3 LinReg & LogReg Exercises
Original article: http://www.jianshu.com/p/311141f2047d
Problem Description
Questions 13-15: generate 1,000 points uniformly from [-1, 1] x [-1, 1], label them with the target f(x1, x2) = sign(x1^2 + x2^2 - 0.6), flip 10% of the labels as noise, and run closed-form linear regression on the raw features (13) and on the quadratic feature transform (1, x1, x2, x1*x2, x1^2, x2^2) (14-15), averaging over 1,000 experiments.
Questions 18-20: train logistic regression on hw3_train.dat with fixed-learning-rate gradient descent (T = 2000, eta = 0.001 for 18; eta = 0.01 for 19) and with stochastic gradient descent cycling through the examples in order (T = 2000, eta = 0.001 for 20), then report Eout on hw3_test.dat.
Implementation
13-15
# coding: utf-8
import numpy as np
import numpy.random as random
import matplotlib.pyplot as plt


def sign(x):
    if x >= 0:
        return 1
    else:
        return -1


def gen_data():
    # sample 1000 points uniformly from [-1, 1] x [-1, 1]
    x1 = random.uniform(-1, 1, 1000)
    x2 = random.uniform(-1, 1, 1000)
    # target f(x1, x2) = sign(x1^2 + x2^2 - 0.6), with 10% of the labels flipped as noise
    id_array = random.permutation(1000)
    dataY = np.zeros((1000, 1))
    for i in range(1000):
        idx = id_array[i]
        if i < 1000 * 0.1:  # the first 10% of the shuffled indices get flipped labels
            dataY[idx][0] = -sign(x1[idx] ** 2 + x2[idx] ** 2 - 0.6)
        else:
            dataY[idx][0] = sign(x1[idx] ** 2 + x2[idx] ** 2 - 0.6)
    # prepend the constant feature x0 = 1
    dataX = np.concatenate((np.ones((1000, 1)),
                            x1.reshape((1000, 1)),
                            x2.reshape((1000, 1))), axis=1)
    return dataX, dataY


def w_lin(dataX, dataY):
    # closed-form linear regression: w = (X^T X)^{-1} X^T y
    dataX_T = np.transpose(dataX)
    tmp = np.dot(np.linalg.inv(np.dot(dataX_T, dataX)), dataX_T)
    return np.dot(tmp, dataY)


def pred(dataX, wLIN):
    # binary predictions: sign of the linear output
    pred = np.dot(dataX, wLIN)
    num_data = dataX.shape[0]
    for i in range(num_data):
        pred[i][0] = sign(pred[i][0])
    return pred


def zero_one_cost(pred, dataY):
    # fraction of misclassified examples (0/1 error)
    return np.sum(pred != dataY) / dataY.shape[0]


def feat_transform(dataX):
    # quadratic transform: (1, x1, x2) -> (1, x1, x2, x1*x2, x1^2, x2^2)
    num_data = dataX.shape[0]
    tmp1 = dataX[:, 1] * dataX[:, 2]
    tmp2 = dataX[:, 1] ** 2
    tmp3 = dataX[:, 2] ** 2
    new_dataX = np.concatenate(
        (dataX,
         tmp1.reshape((num_data, 1)),
         tmp2.reshape((num_data, 1)),
         tmp3.reshape((num_data, 1))), axis=1)
    return new_dataX


if __name__ == "__main__":
    # 13: average Ein of linear regression on the raw features
    cost_list = []
    for i in range(1000):
        dataX, dataY = gen_data()
        wLIN = w_lin(dataX, dataY)
        cost_list.append(zero_one_cost(pred(dataX, wLIN), dataY))
    # show results
    print("the average Ein over 1000 experiments: ", sum(cost_list) / len(cost_list))
    plt.figure()
    plt.hist(cost_list)
    plt.xlabel("zero_one Ein")
    plt.ylabel("frequency")
    plt.title("13")
    plt.savefig("13.png")

    # 14-15: linear regression after the quadratic feature transform
    W = []
    cost_list = []
    for i in range(1000):
        # train
        dataX, dataY = gen_data()
        dataX = feat_transform(dataX)
        wLIN = w_lin(dataX, dataY)
        W.append(wLIN[:, 0].tolist())
        # test on a freshly generated data set
        testX, testY = gen_data()
        testX = feat_transform(testX)
        cost_list.append(zero_one_cost(pred(testX, wLIN), testY))
    min_cost = min(cost_list)
    min_id = cost_list.index(min_cost)
    print(W[min_id])
    W = np.array(W)
    # show w3
    print("the average w3 over 1000 experiments: ", np.average(W, axis=0)[3])
    plt.figure()
    plt.hist(W[:, 3].tolist())
    plt.xlabel("w3")
    plt.ylabel("frequency")
    plt.title("14")
    plt.savefig("14.png")
    # show Eout
    print("the average Eout over 1000 experiments: ", sum(cost_list) / len(cost_list))
    plt.figure()
    plt.hist(cost_list)
    plt.xlabel("Eout")
    plt.ylabel("frequency")
    plt.title("15")
    plt.savefig("15.png")
18-20
# coding: utf-8
import numpy as np


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def read_data(dataFile):
    with open(dataFile, 'r') as f:
        lines = f.readlines()
    data_list = []
    for line in lines:
        line = line.strip().split()
        data_list.append([1.0] + [float(l) for l in line])  # prepend x0 = 1
    dataArray = np.array(data_list)
    num_data = dataArray.shape[0]
    num_dim = dataArray.shape[1] - 1
    dataX = dataArray[:, :-1].reshape((num_data, num_dim))
    dataY = dataArray[:, -1].reshape((num_data, 1))
    return dataX, dataY


def gradient_descent(w, dataX, dataY, eta):
    # one full-batch step: w <- w - eta * average_n[ sigmoid(-y_n w^T x_n) * (-y_n x_n) ]
    assert w.shape[0] == dataX.shape[1], "wrong shape!"
    assert w.shape[1] == 1, "wrong shape of w!"
    num_data = dataX.shape[0]
    num_dim = dataX.shape[1]
    tmp1 = -dataY * dataX
    tmp2 = -dataY * np.dot(dataX, w)
    for i in range(num_data):
        tmp2[i][0] = sigmoid(tmp2[i][0])
    tmp3 = np.average(tmp1 * tmp2, axis=0)
    new_w = w - eta * tmp3.reshape((num_dim, 1))
    return new_w


def s_gradient_descent(w, dataX, dataY, eta):
    # one stochastic step on a single example (dataX, dataY)
    assert w.shape[0] == dataX.shape[1], "wrong shape!"
    assert w.shape[1] == 1, "wrong shape of w!"
    assert dataX.shape[0] == 1, "wrong shape of x!"
    assert dataY.shape[0] == 1, "wrong shape of y!"
    num_dim = dataX.shape[1]
    tmp1 = -dataY * dataX
    tmp2 = -dataY * np.dot(dataX, w)
    tmp2[0][0] = sigmoid(tmp2[0][0])
    tmp3 = np.average(tmp1 * tmp2, axis=0)
    new_w = w - eta * tmp3.reshape((num_dim, 1))
    return new_w


def pred(wLOG, dataX):
    # threshold sigmoid(w^T x) at 0.5, i.e. predict sign(w^T x)
    pred = np.dot(dataX, wLOG)
    num_data = dataX.shape[0]
    for i in range(num_data):
        pred[i][0] = sigmoid(pred[i][0])
        if pred[i][0] >= 0.5:
            pred[i][0] = 1
        else:
            pred[i][0] = -1
    return pred


def zero_one_cost(pred, dataY):
    # fraction of misclassified examples (0/1 error)
    return np.sum(pred != dataY) / dataY.shape[0]


if __name__ == "__main__":
    # train
    dataX, dataY = read_data("hw3_train.dat")
    num_dim = dataX.shape[1]
    w = np.zeros((num_dim, 1))

    print("\n18")
    for i in range(2000):
        w = gradient_descent(w, dataX, dataY, eta=0.001)
    print("the weight vector within g: ", w[:, 0])
    # test
    testX, testY = read_data("hw3_test.dat")
    Eout = zero_one_cost(pred(w, testX), testY)
    print("the Eout(g) on the test set: ", Eout)

    print("\n18.1")
    w = np.zeros((num_dim, 1))
    for i in range(20000):
        w = gradient_descent(w, dataX, dataY, eta=0.001)
    print("the weight vector within g: ", w[:, 0])
    # test
    Eout = zero_one_cost(pred(w, testX), testY)
    print("the Eout(g) on the test set: ", Eout)

    print("\n19")
    w = np.zeros((num_dim, 1))
    for i in range(2000):
        w = gradient_descent(w, dataX, dataY, eta=0.01)
    print("the weight vector within g: ", w[:, 0])
    # test
    Eout = zero_one_cost(pred(w, testX), testY)
    print("the Eout(g) on the test set: ", Eout)

    print("\n20")
    w = np.zeros((num_dim, 1))
    num_data = dataX.shape[0]
    for i in range(2000):
        i %= num_data  # cycle through the training examples in order
        x = dataX[i, :].reshape((1, num_dim))
        y = dataY[i, :].reshape((1, 1))
        w = s_gradient_descent(w, x, y, eta=0.001)
    print("the weight vector within g: ", w[:, 0])
    # test
    Eout = zero_one_cost(pred(w, testX), testY)
    print("the Eout(g) on the test set: ", Eout)
Results and Analysis
13-15
The script above saves the Ein histogram to 13.png, the w3 histogram to 14.png, and the Eout histogram to 15.png.
18-20
Comparing 18 with 18.1 shows that with a small learning rate, many more iterations are needed to reach a good result: the progress of fixed-rate gradient descent scales roughly with eta * T, so 18 (0.001 * 2000 = 2) stops much earlier along the optimization path than 18.1 (0.001 * 20000 = 20) or 19 (0.01 * 2000 = 20).