
Logistic/Softmax Regression

Posted: 2017-02-23 21:08:02

Helper Functions

(The formula images embedded in the original post are not preserved.)
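Judging from the code below, the lost figures presumably showed the standard logistic-regression quantities that the helper code implements: the sigmoid, the hypothesis, the averaged negative log-likelihood, and its gradient. A sketch in LaTeX, using the same symbols as the code ($m$ examples $x^{(i)}$ with labels $y^{(i)} \in \{0,1\}$, step size $\alpha$):

\[
  g(z) = \frac{1}{1 + e^{-z}}, \qquad
  h_\theta(x) = g(\theta^{T} x)
\]
\[
  J(\theta) = -\frac{1}{m} \sum_{i=1}^{m}
      \Big[ y^{(i)} \log h_\theta(x^{(i)}) + \big(1 - y^{(i)}\big) \log\big(1 - h_\theta(x^{(i)})\big) \Big]
\]
\[
  \nabla_\theta J(\theta) = \frac{1}{m} \sum_{i=1}^{m}
      \big( h_\theta(x^{(i)}) - y^{(i)} \big)\, x^{(i)}, \qquad
  \theta \leftarrow \theta - \alpha \, \nabla_\theta J(\theta)
\]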

Introduction to Newton's Method

(The formula images embedded in the original post are not preserved.)
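The lost figures presumably covered the Newton update that the third code section implements: replace the fixed step size with the inverse Hessian of $J(\theta)$. A sketch, using the same symbols as above:

\[
  H = \frac{1}{m} \sum_{i=1}^{m} h_\theta(x^{(i)})\big(1 - h_\theta(x^{(i)})\big)\, x^{(i)} x^{(i)\,T},
  \qquad
  \theta \leftarrow \theta - H^{-1} \nabla_\theta J(\theta)
\]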

%% Logistic Regression
close all
clear

%% Load data
x = load('ex4x.dat');
y = load('ex4y.dat');

[m, n] = size(x);

% Add intercept term to x
x = [ones(m, 1), x];

%% Plot the data
% find returns the indices of the
% rows meeting the specified condition
pos = find(y == 1);
neg = find(y == 0);
% Assume the features are in the 2nd and 3rd columns of x
figure('NumberTitle', 'off', 'Name', 'GD');
plot(x(pos, 2), x(pos, 3), '+');
hold on;
plot(x(neg, 2), x(neg, 3), 'o');

% Define the sigmoid function
g = inline('1 ./ (1 + exp(-z))');

alpha = 0.001;
theta = [-10; 1; 1];   % column vector, so x(i,:) * theta is a scalar
obj_old = 1e10;
tor = 1e-4;

tic

%% Gradient Descent
for time = 1:1000
    delta = zeros(3, 1);
    objective = 0;

    for i = 1:m
        z = x(i, :) * theta;
        h = g(z);          % apply the logistic (sigmoid) function
        delta = (1/m) .* x(i, :)' * (y(i) - h) + delta;
        objective = (1/m) .* (-y(i) * log(h) - (1 - y(i)) * log(1 - h)) + objective;
    end
    theta = theta + alpha * delta;

    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the decision boundary line
plot_x = [min(x(:, 2)), max(x(:, 2))];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
pause(5);

%% Stochastic (mini-batch) Gradient Descent
figure('NumberTitle', 'off', 'Name', 'SGD');
plot(x(pos, 2), x(pos, 3), '+');
hold on;
plot(x(neg, 2), x(neg, 3), 'o');

alpha = 0.001;
theta = [-10; 1; 1];
obj_old = 1e10;
tor = 1e-4;
k = 10;                % mini-batch size
U = ceil(m / k);

tic
for time = 1:10000
    delta = zeros(3, 1);
    rand('twister', time * 4);   % reseed so each iteration draws a different permutation
    idx = randperm(m);
    objective = 0;

    subidx = idx(1:k);           % random mini-batch of k examples
    for i = 1:length(subidx)
        z = x(subidx(i), :) * theta;
        h = g(z);                % apply the logistic (sigmoid) function
        delta = (1/k) .* x(subidx(i), :)' * (y(subidx(i)) - h) + delta;
        objective = (1/k) .* (-y(subidx(i)) * log(h) - (1 - y(subidx(i))) * log(1 - h)) + objective;
    end
    theta = theta + alpha * delta;

    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the decision boundary line
plot_x = [min(x(:, 2)), max(x(:, 2))];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
pause(5)

%% Newton's method
figure('NumberTitle', 'off', 'Name', 'Newton');
plot(x(pos, 2), x(pos, 3), '+');
hold on;
plot(x(neg, 2), x(neg, 3), 'o');

alpha = 0.001;
theta = zeros(3, 1);
obj_old = 1e10;
tor = 1e-4;

tic
for time = 1:100       % outer counter renamed so it does not clash with the inner index i
    delta = zeros(3, 1);
    delta_H = zeros(3, 3);
    objective = 0;
    % Accumulate the gradient and the Hessian
    for i = 1:m
        z = x(i, :) * theta;
        h = g(z);          % apply the logistic (sigmoid) function
        delta = (1/m) .* x(i, :)' * (h - y(i)) + delta;
        delta_H = (1/m) .* x(i, :)' * h * (1 - h) * x(i, :) + delta_H;
        objective = (1/m) .* (-y(i) * log(h) - (1 - y(i)) * log(1 - h)) + objective;
    end
    theta = theta - delta_H \ delta;   % Newton step: theta - H^{-1} * gradient
    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the decision boundary line
plot_x = [min(x(:, 2)), max(x(:, 2))];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off
toc
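As a side note, the per-example loop in the gradient-descent section can be replaced with matrix operations. A minimal vectorized sketch, assuming x (m-by-3 with intercept), y (m-by-1), m and the sigmoid g are already defined as above:

% Vectorized batch gradient descent; performs the same update as the loop version
theta = [-10; 1; 1];
alpha = 0.001;
tor = 1e-4;
obj_old = 1e10;
for time = 1:1000
    h = g(x * theta);                               % m-by-1 vector of predictions
    objective = -(1/m) * (y' * log(h) + (1 - y)' * log(1 - h));
    theta = theta - (alpha/m) * (x' * (h - y));     % identical step to the loop version
    if abs(obj_old - objective) < tor, break; end
    obj_old = objective;
end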
%% Softmax Regression
close all
clear

%% Load data
load('my_ex4x.mat');
load('my_ex4y.mat');

[m, n] = size(x);

% Add intercept term to x
x = [ones(m, 1), x];
y = y + 1;             % shift labels from 0..2 to 1..3

class_num = max(y);
n = n + 1;

%% Plot the data
% find returns the indices of the
% rows meeting the specified condition
class2 = find(y == 2);
class1 = find(y == 1);
class3 = find(y == 3);
% Assume the features are in the 2nd and 3rd columns of x
figure('NumberTitle', 'off', 'Name', 'GD');
plot(x(class2, 2), x(class2, 3), '+');
hold on;
plot(x(class1, 2), x(class1, 3), 'o');
plot(x(class3, 2), x(class3, 3), '*');

% Softmax probability of one class; the last class (class 3) is the
% reference class whose parameters are fixed at zero
g = inline('exp(z) ./ sumz', 'z', 'sumz');

alpha = 0.0001;
theta = [-16, -10; 0.15, 1; 0.14, -1];   % one 3x1 parameter column per non-reference class
obj_old = 1e10;
tor = 1e-4;

%% Gradient Descent
for time = 1:1000
    objective = 0;

    for i = 1:m
        for j = 1:2
            z = x(i, :) * theta(:, j);
            sumz = exp(x(i, :) * theta(:, 1)) + exp(x(i, :) * theta(:, 2)) + 1;
            h = g(z, sumz);              % softmax probability of class j
            if y(i) == j
                delta = (1/m) .* x(i, :)' * (1 - h);
                objective = objective - (1/m) * log(h);   % cross-entropy of the true class
            else
                delta = (1/m) .* x(i, :)' * (-h);
            end
            theta(:, j) = theta(:, j) + alpha * delta;
        end
        if y(i) == 3
            % the true class is the reference class; its probability is 1/sumz
            objective = objective + (1/m) * log(sumz);
        end
    end

    fprintf('objective is %.4f\n', objective);
    if abs(obj_old - objective) < tor
        fprintf('tolerance is smaller than %.4f\n', tor);
        break;
    end
    obj_old = objective;
end

%% Calculate the two decision boundary lines
plot_x = [min(x(:, 2)), max(x(:, 2))];
plot_y = (-1 ./ theta(3, 1)) .* (theta(2, 1) .* plot_x + theta(1, 1));
plot(plot_x, plot_y)
hold on

plot_y = (-1 ./ theta(3, 2)) .* (theta(2, 2) .* plot_x + theta(1, 2));
plot(plot_x, plot_y)
legend('Class 2', 'Class 1', 'Class 3', 'Boundary 1', 'Boundary 2')
hold off
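After training, class j gets probability exp(theta_j' * x) / (1 + sum_c exp(theta_c' * x)), with the reference class contributing the constant 1 to the denominator. A minimal sketch of classifying the training points with the learned parameters, assuming x, y, m and the 3-by-2 theta are as defined above:

% Predicted class = argmax of the per-class softmax probabilities
scores = [exp(x * theta), ones(m, 1)];           % unnormalized probabilities for classes 1..3
prob = scores ./ repmat(sum(scores, 2), 1, 3);   % normalize each row to sum to 1
[~, pred] = max(prob, [], 2);                    % predicted class label per example
fprintf('training accuracy: %.2f%%\n', 100 * mean(pred == y));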

 


Original post: http://www.cnblogs.com/pkjplayer/p/6435239.html
