Machine Learning Programming Assignment 1: Linear Regression

Assignment Source Code

warmUpExercise.m ____Basic Function

fprintf('Running warmUpExercise ...\n');

fprintf('5x5 Identity Matrix: \n');

warmUpExercise()

function A = warmUpExercise()

% Return the 5x5 identity matrix
A = eye(5);

end

plotData.m ____Plotting

fprintf('Plotting Data ...\n')

data = load('ex1data1.txt');

X = data(:,1); y = data(:,2);

m = length(y); % number of training examples

plotData(X,y)

function plotData(x,y)

figure;
plot(x,y,'rx','MarkerSize',10);
ylabel('Profit in $10,000s');
xlabel('Population of City in 10,000s');

end

gradientDescent.m ____Gradient Descent

fprintf('Running Gradient Descent ...\n')

X = [ones(m,1), data(:,1)]; % prepend a column of ones for the intercept term

theta = zeros(2,1);     % initialize fitting parameters to zero

iterations = 1500;      % number of gradient descent iterations

alpha = 0.01;           % learning rate

computeCost(X,y,theta); % cost with the initial theta

computeCost.m ____Compute Initial Cost
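
The loop in computeCost implements the squared-error cost from the assignment. With the column of ones in X, the hypothesis is h_\theta(x) = \theta_1 + \theta_2 x, and the cost is

J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right)^2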

function J = computeCost(X,y,theta)

m = length(y);  % number of training examples
J = 0;
cost = 0;
for i = 1 : m
    % squared error of the hypothesis on example i
    cost = cost + (theta(1,1)*X(i,1) + theta(2,1)*X(i,2) - y(i))^2;
end

J = 1/(2*m) * cost;

end
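
As a sanity check, the same cost can be computed without the loop. A vectorized sketch (the name computeCostVectorized is mine, not part of the assignment) that should return the same J:

function J = computeCostVectorized(X,y,theta)
% Vectorized squared-error cost: J = 1/(2m) * ||X*theta - y||^2
m = length(y);
errors = X * theta - y;                  % m x 1 vector of residuals
J = (1 / (2 * m)) * (errors' * errors);  % sum of squared residuals, scaled
end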

theta = gradientDescent(X,y,theta,alpha,iterations);
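
gradientDescent performs batch gradient descent: each iteration updates both parameters simultaneously by

\theta_j := \theta_j - \alpha \, \frac{1}{m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right) x_j^{(i)}

which is exactly what the accumulators cost_theta1 and cost_theta2 compute in the inner loop below.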

function [theta,J_history] = gradientDescent(X,y,theta,alpha,num_iters)

m = length(y);
J_history = zeros(num_iters,1);

for iter = 1 : num_iters
    cost_theta1 = 0;
    cost_theta2 = 0;
    % accumulate the partial derivatives over all training examples
    for i = 1 : m
        cost_theta1 = cost_theta1 + (theta(1,1)*X(i,1) + theta(2,1)*X(i,2) - y(i)) * X(i,1);
        cost_theta2 = cost_theta2 + (theta(1,1)*X(i,1) + theta(2,1)*X(i,2) - y(i)) * X(i,2);
    end
    % simultaneous update: compute both new values before assigning
    new_theta1 = theta(1,1) - alpha * cost_theta1 / m;
    new_theta2 = theta(2,1) - alpha * cost_theta2 / m;
    theta(1,1) = new_theta1;
    theta(2,1) = new_theta2;

    J_history(iter) = computeCost(X,y,theta);
end
end
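
For reference, the inner loop can also be replaced by matrix operations. A vectorized sketch, assuming the same X with a leading column of ones (the name gradientDescentVectorized is mine, not from the assignment):

function [theta,J_history] = gradientDescentVectorized(X,y,theta,alpha,num_iters)
% Batch gradient descent with the update written in matrix form.
m = length(y);
J_history = zeros(num_iters,1);
for iter = 1 : num_iters
    grad = (1/m) * X' * (X * theta - y);       % 2 x 1 vector of partial derivatives
    theta = theta - alpha * grad;              % simultaneous update of both parameters
    J_history(iter) = computeCost(X,y,theta);  % track the cost to verify it decreases
end
end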