%% ex2.m
%% =============plotting================
% Load the first dataset: columns 1-2 are the features, column 3 the 0/1 label.
data = load('ex2data1.txt');   % fixed: curly quotes are not valid MATLAB string delimiters
X = data(:, [1, 2]);
y = data(:, 3);
plotData(X, y);
%% plotData.m
function plotData(X, y)
% PLOTDATA Plot the 2-D examples in X, marking positives and negatives.
%   X : m-by-2 matrix of feature pairs
%   y : m-by-1 vector of 0/1 labels
figure; hold on;

% Logical masks pick out each class (equivalent to find-based indexing).
isPos = (y == 1);
isNeg = (y == 0);

% Positives as bold black crosses, negatives as yellow-filled circles.
plot(X(isPos, 1), X(isPos, 2), 'k+', 'LineWidth', 2, 'MarkerSize', 7);
plot(X(isNeg, 1), X(isNeg, 2), 'ko', 'MarkerFaceColor', 'y', 'MarkerSize', 7);

hold off;
end
%% =============compute cost and gradient===========
% Add the intercept term, then evaluate cost and gradient at theta = 0.
[m, n] = size(X);
X = [ones(m, 1), X];              % prepend a column of ones (bias feature)
initial_theta = zeros(n + 1, 1);  % one parameter per feature plus intercept
[cost, grad] = costFunction(initial_theta, X, y);
%% costFunction.m
function [J, grad] = costFunction(theta, X, y)
% COSTFUNCTION Logistic-regression cost and gradient.
%   theta : (n+1)-by-1 parameter vector
%   X     : m-by-(n+1) design matrix (intercept column assumed included)
%   y     : m-by-1 vector of 0/1 labels
%   J     : scalar cross-entropy cost
%   grad  : (n+1)-by-1 gradient of J with respect to theta
m = length(y);

% Hoist the hypothesis; it is reused by both the cost and the gradient.
h = sigmoid(X * theta);

% Cross-entropy cost. The original split this expression across two lines
% without '...' line continuation, which is a syntax error in MATLAB.
J = (-1 / m) * (log(h)' * y + log(1 - h)' * (1 - y));

% Vectorized gradient replaces the original per-feature loop:
% grad(i) = (1/m) * sum((h - y) .* X(:, i)) for every column i.
grad = (1 / m) * (X' * (h - y));
end
% Plot the decision boundary implied by theta over the training data.
% NOTE(review): 'theta' is not assigned in the visible code — presumably an
% optimization step (e.g. fminunc) between the costFunction call and this
% line produces it; confirm against the full exercise script.
plotDecisionBoundary(theta,X,y);
%% =============predict and accuracies===============
% Admission probability for one example with feature values 45 and 85
% (presumably exam scores — confirm against the dataset description);
% the leading 1 matches the intercept column added to X.
prob = sigmoid([1 45 85] * theta);
% 0/1 predictions over the whole training set for accuracy reporting.
p = predict(theta,X);
%% predict.m
function p = predict(theta, X)
% PREDICT Predict 0/1 labels for logistic regression with a 0.5 threshold.
%   theta : (n+1)-by-1 parameter vector
%   X     : m-by-(n+1) design matrix (intercept column included)
%   p     : m-by-1 double vector of predicted labels (0 or 1)
m = size(X, 1);

% Preallocate as double so the return type matches the original loop version.
p = zeros(m, 1);

% Vectorized threshold replaces the original element-wise loop (which also
% had a confusingly placed parenthesis: 'if(sigmoid(...)) >= 0.5').
p(sigmoid(X * theta) >= 0.5) = 1;
end
%% ex2_reg.m
% Load the second dataset: columns 1-2 are the features, column 3 the 0/1 label.
clear;
data = load('ex2data2.txt');   % fixed: curly quotes are not valid MATLAB string delimiters
X = data(:, [1, 2]);
y = data(:, 3);
plotData(X, y);
%% =====================regularized Logistic Regression======
% Map the two raw features to polynomial terms (mapFeature also adds the
% intercept column), then evaluate the regularized cost at theta = 0.
X = mapFeature(X(:, 1), X(:, 2));
lambda = 1;                             % regularization strength
initial_theta = zeros(size(X, 2), 1);   % one parameter per mapped feature
[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);
%% costFunctionReg.m
function [J, grad] = costFunctionReg(theta, X, y, lambda)
% COSTFUNCTIONREG Regularized logistic-regression cost and gradient.
%   theta  : n-by-1 parameter vector
%   X      : m-by-n design matrix (intercept column assumed included)
%   y      : m-by-1 vector of 0/1 labels
%   lambda : regularization strength; theta(1) (intercept) is NOT regularized
%   J      : scalar regularized cross-entropy cost
%   grad   : n-by-1 gradient of J with respect to theta
m = length(y);
h = sigmoid(X * theta);   % hypothesis, reused by cost and gradient

% Penalty over all parameters except the intercept.
penalty = (lambda / (2 * m)) * sum(theta(2:end) .^ 2);

% BUG FIX: the original broke this statement across lines without '...'
% continuation, so '+lambda/(2*m)*value' parsed as a separate unary-plus
% statement and the penalty was silently dropped from J (the gradient loop
% had the same bug with '+lambda/m*theta(i)').
J = (-1 / m) * (log(h)' * y + log(1 - h)' * (1 - y)) + penalty;

% Vectorized gradient; regularize every component except the intercept.
grad = (1 / m) * (X' * (h - y));
grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end