辅助函数

牛顿法介绍

 %% Logistic Regression
close all
clear %%load data
x = load('ex4x.dat');
y = load('ex4y.dat'); [m, n] = size(x); % Add intercept term to x
x = [ones(m, ), x]; %%draw picture
% find returns the indices of the
% rows meeting the specified condition
pos = find(y == );
neg = find(y == );
% Assume the features are in the 2nd and 3rd
% columns of x
figure('NumberTitle', 'off', 'Name', 'GD');
plot(x(pos, ), x(pos,), '+');
hold on;
plot(x(neg, ), x(neg, ), 'o'); % Define the sigmoid function
g = inline('1 ./ (1 + exp(-z))'); alpha = 0.001;
theta = [-,,]';
obj_old = 1e10;
tor = 1e-; tic %%Gradient Descent
for time = :
delta = zeros(,);
objective = ; for i = :
z = x(i,:) * theta;
h = g(z);%转换成logistic函数
delta = (/m) .* x(i,:)' * (y(i)-h) + delta;
objective = (/m) .*( -y(i) * log(h) - (-y(i)) * log(-h)) + objective;
end
theta = theta + alpha * delta; fprintf('objective is %.4f\n', objective);
if abs(obj_old - objective) < tor
fprintf('torlerance is samller than %.4f\n', tor);
break;
end
obj_old = objective;
end %%Calculate the decision boundary line
plot_x = [min(x(:,)), max(x(:,))];
plot_y = (-./theta()).*(theta().*plot_x +theta());
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
pause();
%% SGD -- minibatch stochastic gradient descent
% NOTE(review): integer literals were lost during extraction; the minibatch
% size and seeds below are plausible reconstructions -- TODO confirm.
figure('NumberTitle', 'off', 'Name', 'SGD');
plot(x(pos, 2), x(pos, 3), '+');
hold on;
plot(x(neg, 2), x(neg, 3), 'o');

alpha = 0.001;            % learning rate
theta = [-16, 0, 0]';     % initial guess -- TODO confirm lost original values
obj_old = 1e10;
tor = 1e-6;               % convergence tolerance -- TODO confirm exponent
k = 10;                   % minibatch size -- TODO confirm
U = ceil(m / k);          % minibatches per epoch (reconstructed; unused below)
for time = 1:100000
    delta = zeros(3, 1);  % gradient accumulator over the minibatch
    % Re-seed so each iteration draws a reproducible random permutation
    rand('twister', time * 100);
    idx = randperm(m);
    objective = 0;
    subidx = idx(1:k);    % indices of the current minibatch
    for i = 1:length(subidx)
        z = x(subidx(i),:) * theta;
        h = g(z);         % sigmoid of the linear score
        delta = (1/k) .* x(subidx(i),:)' * (y(subidx(i)) - h) + delta;
        objective = (1/k) .* (-y(subidx(i)) * log(h) - (1 - y(subidx(i))) * log(1 - h)) + objective;
    end
    theta = theta + alpha * delta;
    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the decision boundary line
plot_x = [min(x(:,2)), max(x(:,2))];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
pause()

%% Newton's method
% NOTE(review): lost integer literals reconstructed from the standard ex4
% exercise (3 features including intercept) -- confirm against the original.
figure('NumberTitle', 'off', 'Name', 'Newton');
plot(x(pos, 2), x(pos, 3), '+');
hold on;
plot(x(neg, 2), x(neg, 3), 'o');

alpha = 0.001;            % kept for parity with the other sections (unused by Newton)
theta = zeros(3, 1);
obj_old = 1e10;
tor = 1e-6;               % convergence tolerance -- TODO confirm exponent
% Outer loop renamed from i to iter: the original shadowed the data-loop
% variable i, which made the code confusing even though MATLAB's for
% reassigns the loop variable each iteration.
for iter = 1:100
    delta = zeros(3, 1);      % gradient accumulator
    delta_H = zeros(3, 3);    % Hessian accumulator
    objective = 0;
    % Calculate the hypothesis, gradient, Hessian and objective over the data
    for i = 1:m
        z = x(i,:) * theta;
        h = g(z);             % sigmoid of the linear score
        delta = (1/m) .* x(i,:)' * (h - y(i)) + delta;
        delta_H = (1/m) .* x(i,:)' * h * (1 - h) * x(i,:) + delta_H;
        objective = (1/m) .* (-y(i) * log(h) - (1 - y(i)) * log(1 - h)) + objective;
    end
    % Newton step: theta <- theta - H^{-1} * gradient
    theta = theta - delta_H \ delta;
    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the decision boundary line
plot_x = [min(x(:,2)), max(x(:,2))];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
 %% Softmax Regression
close all
clear %%load data
load('my_ex4x.mat');
load('my_ex4y.mat'); [m, n] = size(x); % Add intercept term to x
x = [ones(m, ), x];
y = y + ; class_num = max(y);
n = n + ; %%draw picture
% find returns the indices of the
% rows meeting the specified condition
class2 = find(y == );
class1 = find(y == );
class3 = find(y == );
% Assume the features are in the 2nd and 3rd
% columns of x
figure('NumberTitle', 'off', 'Name', 'GD');
plot(x(class2, ), x(class2,), '+');
hold on;
plot(x(class1, ), x(class1, ), 'o');
hold on;
plot(x(class3, ), x(class3, ), '*');
hold on; % Define the sigmoid function
g = inline('exp(z) ./ sumz','z','sumz'); alpha = 0.0001;
theta = [-,0.15,0.14;-,,-]';
obj_old = 1e10;
tor = 1e-; %%Gradient Descent
for time = :
delta = zeros(,);
objective = ; for i = :
for j = :
z = x(i,:) * theta(:,j);
sumz = exp(x(i,:) * theta(:,)) + exp(x(i,:) * theta(:,)) + ;
h = g(z,sumz);%转换成logistic函数
if y(i)==j
delta = (/m) .* x(i,:)' * (1-h);
theta(:,j) = theta(:,j) + alpha * delta;
objective = (/m) .*(-y(i) * log(h)) + objective;
else
delta = (/m) .* x(i,:)' * (-h);
theta(:,j) = theta(:,j) + alpha * delta;
objective = (/m) .*(-(-y(i)) * log(-h)) + objective;
end
end
end fprintf('objective is %.4f\n', objective);
if abs(obj_old - objective) < tor
fprintf('torlerance is samller than %.4f\n', tor);
break;
end
obj_old = objective;
end %%Calculate the decision boundary line
plot_x = [min(x(:,)), max(x(:,))];
plot_y = (-./theta(,)).*(theta(,).*plot_x +theta(,));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold on plot_y = (-./theta(,)).*(theta(,).*plot_x +theta(,));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off

Logistic/Softmax Regression的更多相关文章

  1. 机器学习方法(五):逻辑回归Logistic Regression,Softmax Regression

    欢迎转载,转载请注明:本文出自Bin的专栏blog.csdn.net/xbinworld. 技术交流QQ群:433250724,欢迎对算法.技术.应用感兴趣的同学加入. 前面介绍过线性回归的基本知识, ...

  2. Softmax回归(Softmax Regression)

    转载请注明出处:http://www.cnblogs.com/BYRans/ 多分类问题 在一个多分类问题中,因变量y有k个取值,即.例如在邮件分类问题中,我们要把邮件分为垃圾邮件.个人邮件.工作邮件 ...

  3. TensorFlow实战之Softmax Regression识别手写数字

         关于本文说明,本人原博客地址位于http://blog.csdn.net/qq_37608890,本文来自笔者于2018年02月21日 23:10:04所撰写内容(http://blog.c ...

  4. R︱Softmax Regression建模 (MNIST 手写体识别和文档多分类应用)

    本文转载自经管之家论坛, R语言中的Softmax Regression建模 (MNIST 手写体识别和文档多分类应用) R中的softmaxreg包,发自2016-09-09,链接:https:// ...

  5. TensorFlow(2)Softmax Regression

    Softmax Regression Chapter Basics generate random Tensors Three usual activation function in Neural ...

  6. 逻辑回归与神经网络还有Softmax regression的关系与区别

    本文讨论的关键词:Logistic Regression(逻辑回归).Neural Networks(神经网络) 之前在学习LR和NN的时候,一直对它们独立学习思考,就简单当做是机器学习中的两个不同的 ...

  7. 深度学习 Deep Learning UFLDL 最新Tutorial 学习笔记 5:Softmax Regression

    Softmax Regression Tutorial地址:http://ufldl.stanford.edu/tutorial/supervised/SoftmaxRegression/ 从本节開始 ...

  8. 2.1、Softmax Regression模型

    Softmax Regression模型 由于Logistics Regression算法复杂度低,容易实现等特点,在工业中的到广泛的使用,但是Logistics Regression算法主要用于处理 ...

  9. 基于MNIST数据的softmax regression

    跟着tensorflow上mnist基本机器学习教程联系 首先了解sklearn接口: sklearn.linear_model.LogisticRegression In the multiclas ...

随机推荐

  1. iOS开发 当前时间 时间戳 转换

    1.今天在做一个webservice的接口的时候,被要求传一个时间戳过去,然后就是开始在Google上找 2.遇到两个问题,一,当前时间转化为时间戳,二,获取的当前时间和系统的时间相差8个小时 一,转 ...

  2. PostgreSQL触发器的使用

    原文: https://www.yiibai.com/postgresql/postgresql-trigger.html -------------------------------------- ...

  3. MySQL的字符编码体系(一)——数据存储编码

    安装MySQL好多次了,每次都会纠结于数据库的字符编码配置,所以我决定这一次彻底把它理清. MySQL的字符编码结构比較细,它慷慨向分为两个部分:数据存储编码和传输数据编码.本篇讨论数据存储编码部分, ...

  4. 安装 python 的 pip install fabric 库 问题

    安装 pip install fabric 安装依赖需要 gcc 并且不能单独的安装gcc 还要安装完整的gcc依赖 yum -y install gcc gcc-c++ kernel-devel y ...

  5. Effective C++ 条款七 为多态基类声明virtual析构函数

    class TimeKeeper { public: TimeKeeper(); // ~TimeKeeper(); 错误,此作为一个基类,被继承了.其继承类被delete后,基类被销毁,但继承类可能 ...

  6. ios NSAttributedString 具体解释

    ios NSAttributedString 具体解释 NSAttributedString能够让我们使一个字符串显示的多样化,可是眼下到iOS 5为止,好像对它支持的不是非常好,由于显示起来不太方便 ...

  7. c#中的多态 c#中的委托

    C#中的多态性          相信大家都对面向对象的三个特征封装.继承.多态很熟悉,每个人都能说上一两句,但是大多数都仅仅是知道这些是什么,不知道CLR内部是如何实现的,所以本篇文章主要说说多态性 ...

  8. DirectX11 学习笔记5 - D3DXMath 库

    directx11 特别推出了一个数学库 用于向量 矩阵的计算.并且使用128位 同一时候计算4条32位数据 (SIMD) 之前基本是用的directx10math.h这个头文件.  这个数学库既然是 ...

  9. 组件接口(API)设计指南[2]-类接口(class interface)

    *返回文件夹阅读其它章节: http://blog.csdn.net/cuibo1123/article/details/39894477 类接口(class interface) 你能够參考MGTi ...

  10. Linux在本地使用yum安装软件(转)

    经常遇到有的linux服务器由于特殊原因,不能连接外网,但是经常需要安装一些软件,尤其是在编译一些包的时候经常由于没有安装一些依存包而报的各种各样的错误,当你找到依存的rpm包去安装的时候,又提示你有 ...