test14.py

# -*- coding: utf-8 -*-
import sys
sys.path.append(".")  # make sure svdRec.py (in the same directory) is importable
import svdRec
from numpy import *
from numpy import linalg as la

# U, Sigma, VT = linalg.svd([[1, 1], [7, 7]])
# print(U)
# print(Sigma)
# print(VT)

# Data = svdRec.loadExData()
# U, Sigma, VT = linalg.svd(Data)
# print(Sigma)
#
# Sig3 = mat([[Sigma[0], 0, 0], [0, Sigma[1], 0], [0, 0, Sigma[2]]])
# res = U[:, :3]*Sig3*VT[:3, :]
# print("res:")
# print(res)
#
# myMat = mat(svdRec.loadExData())
# ecl = svdRec.ecludSim(myMat[:, 0], myMat[:, 4])
# print("ecl:")
# print(ecl)
# cos = svdRec.cosSim(myMat[:, 0], myMat[:, 4])
# print("cos:")
# print(cos)
# pears = svdRec.pearsSim(myMat[:, 0], myMat[:, 4])
# print("pears:")
# print(pears)
# myMat = mat(svdRec.loadExData())
# myMat[0, 1] = myMat[0, 0] = myMat[1, 0] = myMat[2, 0] = 4
# myMat[3, 3] = 2
# print("myMat:")
# print(myMat)
#
# tuiJian = svdRec.recommend(myMat, 2)
# print("tuiJian:")
# print(tuiJian)
#
# tuiJian1 = svdRec.recommend(myMat, 2, simMeas = svdRec.ecludSim)
# print("tuiJian1:")
# print(tuiJian1)
#
# tuiJian2 = svdRec.recommend(myMat, 2, simMeas = svdRec.pearsSim)
# print("tuiJian2:")
# print(tuiJian2)

# myMat = mat(svdRec.loadExData2())
# U, Sigma, VT = la.svd(mat(svdRec.loadExData2()))
# print(Sigma)
#
# Sig2 = Sigma**2
# total = sum(Sig2)
# total9 = total*0.9
# print("total9:")
# print(total9)
#
# total3 = sum(Sig2[:3])
# print("total3:")
# print(total3)

# svdRes = svdRec.recommend(myMat, 1, estMethod = svdRec.svdEst)
# print("svdRes:")
# print(svdRes)

svdRec.imgCompress(2)  # imgCompress() prints the original and reconstructed matrices itself
print("over!!!")
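The commented-out blocks above retrace the chapter's exercises step by step. A minimal runnable subset of the recommendation part (a sketch, assuming svdRec.py sits in the same directory) would be:

import svdRec
from numpy import mat

myMat = mat(svdRec.loadExData())
myMat[0, 1] = myMat[0, 0] = myMat[1, 0] = myMat[2, 0] = 4  # fill in a few extra ratings
myMat[3, 3] = 2
print(svdRec.recommend(myMat, 2))                           # default: cosine similarity
print(svdRec.recommend(myMat, 2, simMeas=svdRec.ecludSim))  # Euclidean similarity
print(svdRec.recommend(myMat, 2, simMeas=svdRec.pearsSim))  # Pearson similarity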
svdRec.py
'''
Created on Mar 8, 2011
@author: Peter
'''
from numpy import *
from numpy import linalg as la

def loadExData():
    return [[0, 0, 0, 2, 2],
            [0, 0, 0, 3, 3],
            [0, 0, 0, 1, 1],
            [1, 1, 1, 0, 0],
            [2, 2, 2, 0, 0],
            [5, 5, 5, 0, 0],
            [1, 1, 1, 0, 0]]

def loadExData2():
    return [[0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 5],
            [0, 0, 0, 3, 0, 4, 0, 0, 0, 0, 3],
            [0, 0, 0, 0, 4, 0, 0, 1, 0, 4, 0],
            [3, 3, 4, 0, 0, 0, 0, 2, 2, 0, 0],
            [5, 4, 5, 0, 0, 0, 0, 5, 5, 0, 0],
            [0, 0, 0, 0, 5, 0, 1, 0, 0, 5, 0],
            [4, 3, 4, 0, 0, 0, 0, 5, 5, 0, 1],
            [0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 4],
            [0, 0, 0, 2, 0, 2, 5, 0, 0, 1, 2],
            [0, 0, 0, 0, 5, 0, 0, 0, 0, 4, 0],
            [1, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0]]

def ecludSim(inA, inB):
    # Euclidean-distance similarity, mapped into (0, 1]
    return 1.0/(1.0 + la.norm(inA - inB))

def pearsSim(inA, inB):
    if len(inA) < 3: return 1.0  # too few points for a meaningful correlation
    # Pearson correlation rescaled from [-1, 1] to [0, 1]
    return 0.5 + 0.5*corrcoef(inA, inB, rowvar=0)[0][1]

def cosSim(inA, inB):
    num = float(inA.T*inB)
    denom = la.norm(inA)*la.norm(inB)
    # cosine similarity rescaled from [-1, 1] to [0, 1]
    return 0.5 + 0.5*(num/denom)

def standEst(dataMat, user, simMeas, item):
    # estimate user's rating for item as a similarity-weighted average of the
    # ratings this user has already given to other items
    n = shape(dataMat)[1]
    simTotal = 0.0; ratSimTotal = 0.0
    for j in range(n):
        userRating = dataMat[user, j]
        if userRating == 0: continue
        # rows where both item and j have been rated, so similarity is meaningful
        overLap = nonzero(logical_and(dataMat[:, item].A > 0, dataMat[:, j].A > 0))[0]
        if len(overLap) == 0: similarity = 0
        else: similarity = simMeas(dataMat[overLap, item], dataMat[overLap, j])
        print('the %d and %d similarity is: %f' % (item, j, similarity))
        simTotal += similarity
        ratSimTotal += similarity * userRating
    if simTotal == 0: return 0
    else: return ratSimTotal/simTotal

def svdEst(dataMat, user, simMeas, item):
    n = shape(dataMat)[1]
    simTotal = 0.0; ratSimTotal = 0.0
    U, Sigma, VT = la.svd(dataMat)
    Sig4 = mat(eye(4)*Sigma[:4])  # arrange Sig4 into a diagonal matrix
    xformedItems = dataMat.T * U[:, :4] * Sig4.I  # create transformed items
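    # Dimension check for the projection above: dataMat is (users x items), so
    # dataMat.T is (items x users), U[:, :4] is (users x 4) and Sig4.I is (4 x 4);
    # xformedItems is therefore (items x 4). Each row is one item expressed in the
    # four-dimensional latent space, and the similarities below are computed between
    # these dense rows instead of the original sparse rating columns.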
    for j in range(n):
        userRating = dataMat[user, j]
        if userRating == 0 or j == item: continue
        similarity = simMeas(xformedItems[item, :].T, xformedItems[j, :].T)
        print('the %d and %d similarity is: %f' % (item, j, similarity))
        simTotal += similarity
        ratSimTotal += similarity * userRating
    if simTotal == 0: return 0
    else: return ratSimTotal/simTotal

def recommend(dataMat, user, N=3, simMeas=cosSim, estMethod=standEst):
    unratedItems = nonzero(dataMat[user, :].A == 0)[1]  # find unrated items
    if len(unratedItems) == 0: return 'you rated everything'
    itemScores = []
    for item in unratedItems:
        estimatedScore = estMethod(dataMat, user, simMeas, item)
        itemScores.append((item, estimatedScore))
    # return the N unrated items with the highest estimated scores
    return sorted(itemScores, key=lambda jj: jj[1], reverse=True)[:N]

def printMat(inMat, thresh=0.8):
    for i in range(32):
        for k in range(32):
            if float(inMat[i, k]) > thresh:
                print(1, end='')
            else:
                print(0, end='')
        print('')

def imgCompress(numSV=3, thresh=0.8):
    myl = []
    for line in open('0_5.txt').readlines():
        newRow = []
        for i in range(32):
            newRow.append(int(line[i]))
        myl.append(newRow)
    myMat = mat(myl)
    print("****original matrix******")
    printMat(myMat, thresh)
    U, Sigma, VT = la.svd(myMat)
    SigRecon = mat(zeros((numSV, numSV)))
    for k in range(numSV):  # construct a diagonal matrix from the singular-value vector
        SigRecon[k, k] = Sigma[k]
    reconMat = U[:, :numSV]*SigRecon*VT[:numSV, :]
    print("****reconstructed matrix using %d singular values******" % numSV)
    printMat(reconMat, thresh)
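svdEst() above hard-codes four singular values. The chapter's rule of thumb (also retraced in the commented-out part of test14.py) is to keep enough singular values to retain roughly 90% of the total energy, i.e. the sum of the squared singular values. A quick check against loadExData2(), assuming svdRec.py is importable:

from numpy import *
from numpy import linalg as la
import svdRec

Sigma = la.svd(mat(svdRec.loadExData2()))[1]
Sig2 = Sigma**2
print(sum(Sig2) * 0.9)  # 90% of the total energy
print(sum(Sig2[:3]))    # energy captured by the first 3 singular values
print(sum(Sig2[:4]))    # energy captured by the first 4 (what svdEst() keeps)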