Machine Learning Techniques Notes: Homework #6 — Exercises on AdaBoost & Kernel Ridge Regression
Original post: http://www.jianshu.com/p/9bf9e2add795
AdaBoost
Problem Description


Implementation
# coding:utf-8
import math
import numpy as np
import matplotlib.pyplot as plt

def ReadData(dataFile):
    with open(dataFile, 'r') as f:
        lines = f.readlines()
        data_list = []
        for line in lines:
            line = line.strip().split()
            data_list.append([float(l) for l in line])
        dataArray = np.array(data_list)
        return dataArray

def sign(n):
    if(n>=0):
        return 1
    else:
        return -1

def GetSortedArray(dataArray,i):
    # sort dataArray in ascending order by the values in column i
    data_list=dataArray.tolist()
    sorted_data_list=sorted(data_list,key=lambda x:x[i],reverse=False)
    sortedDataArray=np.array(sorted_data_list)
    return sortedDataArray

def GetUZeroOneError(pred,dataY,u):
    # normalized weighted 0/1 error under example weights u
    return np.sum(u*np.not_equal(pred,dataY))/np.sum(u)

def GetZeroOneError(pred,dataY):
    # plain 0/1 error
    return np.sum(np.not_equal(pred,dataY))/dataY.shape[0]

def decision_stump(dataArray,u):
    # return the best decision stump (s, d, theta) under example weights u
    num_data=dataArray.shape[0]
    num_dim=dataArray.shape[1]-1
    min_e=np.inf
    min_s = np.inf
    min_d=np.inf
    min_theta = np.inf
    min_pred = np.zeros((num_data,))
    for d in range(num_dim):
        sortedDataArray=GetSortedArray(dataArray,d)  # sorting column d yields the valid theta candidates
        d_min_e=np.inf
        d_min_s = np.inf
        d_min_theta = np.inf
        d_min_pred = np.zeros((num_data,))
        for s in [-1.0,1.0]:
            for i in range(num_data):
                if(i==0):
                    theta=-np.inf
                    pred=s*np.ones((num_data,))
                else:
                    if sortedDataArray[i-1,d]==sortedDataArray[i,d]:
                        continue
                    theta=(sortedDataArray[i-1,d]+sortedDataArray[i,d])/2
                    pred=np.zeros((num_data,))
                    for n in range(num_data):
                        pred[n]=s*sign(dataArray[n,d]-theta)
                d_now_e=GetUZeroOneError(pred,dataArray[:,-1],u)
                if(d_now_e<d_min_e):
                    d_min_e=d_now_e
                    d_min_s=s
                    d_min_theta=theta
                    d_min_pred=pred
        if(d_min_e<min_e):
            min_e=d_min_e
            min_s=d_min_s
            min_d=d
            min_theta=d_min_theta
            min_pred=d_min_pred
    return min_s,min_d,min_theta,min_pred,min_e

def Pred(paraList,dataX):
    # paraList=[s,d,theta]
    num_data=dataX.shape[0]
    pred=np.zeros((num_data,))
    for i in range(num_data):
        pred[i]=paraList[0]*sign(dataX[i,paraList[1]]-paraList[2])
    return pred

def plot_line_chart(X=np.arange(0,300,1).tolist(),Y=np.arange(0,300,1).tolist(),nameX="t",nameY="Ein(gt)",saveName="12.png"):
    plt.figure(figsize=(30,12))
    plt.plot(X,Y,'b')
    plt.plot(X,Y,'ro')
    plt.xlim((X[0]-1,X[-1]+1))
    for (x,y) in zip(X,Y):
        if(x%10==0):
            plt.text(x+0.1,y,str(round(y,4)))
    plt.xlabel(nameX)
    plt.ylabel(nameY)
    plt.title(nameY+" versus "+nameX)
    plt.savefig(saveName)
    return
if __name__=="__main__":
    dataArray=ReadData("hw2_adaboost_train.dat")
    dataY=dataArray[:,-1]
    dataX=dataArray[:,:-1]
    num_data=dataArray.shape[0]
    u=np.full(shape=(num_data,),fill_value=1/num_data)
    ein_g_list=[]
    alpha_list=[]
    g_list=[]
    ein_G_list=[]
    u_sum_list=[]
    epi_list=[]
    min_pred_list=[]
    # AdaBoost with decision stumps, T = 300 iterations
    for t in range(300):
        u_sum_list.append(np.sum(u))
        min_s,min_d,min_theta,min_pred,epi=decision_stump(dataArray,u)
        g_list.append([min_s,min_d,min_theta])
        min_pred_list.append(min_pred)
        ein_g=GetZeroOneError(min_pred,dataY)
        ein_g_list.append(ein_g)
        epi_list.append(epi)
        para=math.sqrt((1-epi)/epi)
        alpha_list.append(math.log(para))
        # re-weight the examples: scale down the correctly classified ones, scale up the misclassified ones
        for i in range(num_data):
            if min_pred[i]==dataY[i]:
                u[i]/=para
            else:
                u[i]*=para
        # aggregated classifier G_t built from g_1,...,g_t (including the current stump)
        predG=np.zeros((num_data,))
        for ta in range(t+1):
            predG+=alpha_list[ta]*min_pred_list[ta]
        for n in range(num_data):
            predG[n]=sign(predG[n])
        ein_G_list.append(GetZeroOneError(predG,dataY))
    # Question 12
    plot_line_chart(Y=ein_g_list)
    print("Ein(g1):",ein_g_list[0])
    print("alpha1:",alpha_list[0])
    # Question 14
    plot_line_chart(Y=ein_G_list,nameY="Ein(Gt)",saveName="14.png")
    print("Ein(G):",ein_G_list[-1])
    # Question 15
    plot_line_chart(Y=u_sum_list, nameY="Ut", saveName="15.png")
    print("U2:",u_sum_list[1])
    print("UT:",u_sum_list[-1])
    # Question 16
    plot_line_chart(Y=epi_list,nameY="epsilon_t",saveName="16.png")
    print("the minimum value of epsilon_t:",min(epi_list))
    testArray=ReadData("hw2_adaboost_test.dat")
    num_test=testArray.shape[0]
    testX=testArray[:,:-1]
    testY=testArray[:,-1]
    pred_g_list=[]
    eout_g_list=[]
    eout_G_list=[]
    for t in range(300):
        pred_g=Pred(g_list[t],testX)
        pred_g_list.append(pred_g)
        eout_g_list.append(GetZeroOneError(pred_g,testY))
        # out-of-sample aggregation over g_1,...,g_t
        pred_G=np.zeros((num_test,))
        for ta in range(t+1):
            pred_G+=alpha_list[ta]*pred_g_list[ta]
        sign_ufunc=np.frompyfunc(sign,1,1)
        pred_G=sign_ufunc(pred_G)
        eout_G_list.append(GetZeroOneError(pred_G,testY))
    # Question 17
    plot_line_chart(Y=eout_g_list, nameY="Eout(gt)", saveName="17.png")
    print("Eout(g1):",eout_g_list[0])
    # Question 18
    plot_line_chart(Y=eout_G_list, nameY="Eout(Gt)", saveName="18.png")
    print("Eout(G):",eout_G_list[-1])
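For reference, the loop above is the standard AdaBoost-Stump procedure. Each base hypothesis returned by decision_stump has the form g(x) = s * sign(x_i - theta); at iteration t, epi is the normalized weighted error, para is the scaling factor applied to the example weights (written as a diamond below), and math.log(para) is the vote alpha_t of the current stump:

\[
\epsilon_t=\frac{\sum_{n=1}^{N}u_n^{(t)}\,\mathbf{1}\!\left[y_n\neq g_t(x_n)\right]}{\sum_{n=1}^{N}u_n^{(t)}},\qquad
\diamond_t=\sqrt{\frac{1-\epsilon_t}{\epsilon_t}},\qquad
\alpha_t=\ln\diamond_t,
\]
\[
u_n^{(t+1)}=\begin{cases}u_n^{(t)}/\diamond_t, & y_n=g_t(x_n)\\[2pt] u_n^{(t)}\cdot\diamond_t, & y_n\neq g_t(x_n)\end{cases}
\qquad
G_t(x)=\operatorname{sign}\!\left(\sum_{\tau=1}^{t}\alpha_\tau\,g_\tau(x)\right).
\]

As a quick sanity check of decision_stump on its own, a minimal snippet like the following can be run with the functions above in scope; the toy dataset and the expected output are illustrative assumptions, not part of the homework data:

# toy sanity check for decision_stump (assumes the functions above are in scope)
import numpy as np
toyArray = np.array([[1.0, -1.0],
                     [2.0, -1.0],
                     [3.0,  1.0],
                     [4.0,  1.0]])   # one feature column plus the label column
u0 = np.full((4,), 0.25)             # uniform initial example weights
s, d, theta, pred, err = decision_stump(toyArray, u0)
print(s, d, theta, err)              # expected: 1.0 0 2.5 0.0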
Results
(Plots saved by the script: 12.png Ein(gt) versus t, 14.png Ein(Gt) versus t, 15.png Ut versus t, 16.png epsilon_t versus t, 17.png Eout(gt) versus t, 18.png Eout(Gt) versus t.)
Kernel Ridge Regression
Problem Description

Implementation
# coding:utf-8
import numpy as np
import math

def ReadData(dataFile):
    with open(dataFile, 'r') as f:
        lines = f.readlines()
        data_list = []
        for line in lines:
            line = line.strip().split()
            data_list.append([1.0]+[float(l) for l in line])  # prepend a constant bias feature
        dataArray = np.array(data_list)
        return dataArray

def sign(n):
    if(n>=0):
        return 1
    else:
        return -1

def RBFKernel(X1,X2,gamma):
    return math.exp(-gamma*np.sum(np.square(X1-X2)))

def GetKernelMatrix(trainX,dataX,gamma):
    num_train = trainX.shape[0]
    num_data = dataX.shape[0]
    mat = np.zeros((num_train,num_data))
    for i in range(num_train):
        if num_train==num_data and np.equal(trainX,dataX).all():
            # symmetric case: compute the lower triangle and mirror it
            for j in range(i+1):
                mat[i][j] = RBFKernel(dataX[i, :], dataX[j, :], gamma)
                if(i!=j):
                    mat[j][i]=mat[i][j]
        else:
            for j in range(num_data):
                mat[i][j]=RBFKernel(trainX[i,:],dataX[j,:],gamma)
    return mat

def GetZeroOneError(pred,dataY):
    return np.sum(np.not_equal(pred,dataY))/dataY.shape[0]

def KernelRidgeRegression(trainArray,lamb,gamma):
    num_train=trainArray.shape[0]
    trainX=trainArray[:,:-1]
    trainY=trainArray[:,-1].reshape((num_train,1))
    K=GetKernelMatrix(trainX,trainX,gamma)
    # closed-form solution: beta = (lambda*I + K)^(-1) y
    beta=np.dot(np.linalg.inv(lamb*np.eye(num_train)+K),trainY)
    return beta

def Predict(trainX,dataX,beta,gamma):
    num_data=dataX.shape[0]
    pred=np.zeros((num_data,))
    K=GetKernelMatrix(trainX,dataX,gamma)
    pred=np.dot(K.transpose(),beta).reshape((num_data,))
    for n in range(num_data):
        pred[n]=sign(pred[n])
    return pred

if __name__=="__main__":
    dataArray=ReadData("hw2_lssvm_all.dat")
    trainArray=dataArray[:400,:]
    testArray=dataArray[400:,:]
    gammaList=[32,2,0.125]
    lambdaList=[0.001,1,1000]
    ein_list=[]
    eout_list=[]
    for l in lambdaList:
        for g in gammaList:
            beta=KernelRidgeRegression(trainArray,l,g)
            ein_list.append(GetZeroOneError(Predict(trainArray[:,:-1],trainArray[:,:-1],beta,g),trainArray[:,-1]))
            eout_list.append(GetZeroOneError(Predict(trainArray[:,:-1],testArray[:,:-1],beta,g),testArray[:,-1]))
    min_ein=min(ein_list)
    min_ein_id=ein_list.index(min_ein)
    min_eout=min(eout_list)
    min_eout_id=eout_list.index(min_eout)
    # Question 19
    print("the minimum Ein(g):",min_ein,",the corresponding parameter combinations: gamma=",gammaList[min_ein_id%3],",lambda=",lambdaList[min_ein_id//3])
    # Question 20
    print("the minimum Eout(g):",min_eout,",the corresponding parameter combinations: gamma=",gammaList[min_eout_id%3],",lambda=",lambdaList[min_eout_id//3])
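KernelRidgeRegression above computes the closed-form kernel ridge regression (LSSVM) solution with the RBF kernel, and Predict evaluates the resulting classifier on new points:

\[
K_{nm}=\exp\!\left(-\gamma\,\lVert x_n-x_m\rVert^2\right),\qquad
\beta=\left(\lambda I+K\right)^{-1}\mathbf{y},\qquad
g(x)=\operatorname{sign}\!\left(\sum_{n=1}^{N}\beta_n\,K(x_n,x)\right).
\]

If scikit-learn is available, beta can be cross-checked against the dual coefficients of sklearn.kernel_ridge.KernelRidge, which solve the same linear system. The snippet below is only an optional sketch: it assumes scikit-learn is installed, the functions above are in scope, the data file is present, and the parameter choice lamb=1, gamma=2 is arbitrary.

# optional cross-check of the closed form against scikit-learn (arbitrary parameter choice)
import numpy as np
from sklearn.kernel_ridge import KernelRidge
dataArray = ReadData("hw2_lssvm_all.dat")
trainArray = dataArray[:400, :]
lamb, gamma = 1.0, 2.0
beta = KernelRidgeRegression(trainArray, lamb, gamma)
clf = KernelRidge(alpha=lamb, kernel='rbf', gamma=gamma)
clf.fit(trainArray[:, :-1], trainArray[:, -1])
print(np.allclose(beta.ravel(), clf.dual_coef_.ravel()))  # should print True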
Results
