
Commit 717a07a

Initial Commit with Finished Week 1&2 Exercises


56 files changed: +7399, -0 lines

.gitignore

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
# Windows default autosave extension
*.asv

# OSX / *nix default autosave extension
*.m~

# Compiled MEX binaries (all platforms)
*.mex*

# Packaged app and toolbox files
*.mlappinstall
*.mltbx

# Generated helpsearch folders
helpsearch*/

# Simulink code generation folders
slprj/
sccprj/

# Simulink autosave extension
*.autosave

# Octave session info
octave-workspace

# OSX Directory Files
.DS_Store

# Personal Data
*.mat

machine-learning-ex1.zip

496 KB (binary file, contents not shown)

machine-learning-ex1/ex1.pdf

478 KB (binary file, contents not shown)

machine-learning-ex1/ex1/computeCost.m

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression
%   J = COMPUTECOST(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly
J = 0;

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta
%               You should set J to the cost.

H = X * theta;
J = ((H - y).^2) / (2 * m);
J = sum(J);

% =========================================================================

end
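
Since the loop-free cost J(theta) = (1/(2m)) * sum((X*theta - y).^2) is easy to verify by hand, a quick sanity check on a toy dataset is worthwhile (hypothetical data, not from the exercise):

% Three points on the line y = 1 + x, with an intercept column prepended
X = [1 1; 1 2; 1 3];
y = [2; 3; 4];
computeCost(X, y, [1; 1])  % perfect fit -> returns 0
computeCost(X, y, [0; 0])  % (2^2 + 3^2 + 4^2) / (2*3) = 29/6, about 4.8333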

machine-learning-ex1/ex1/computeCostMulti.m

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
function J = computeCostMulti(X, y, theta)
%COMPUTECOSTMULTI Compute cost for linear regression with multiple variables
%   J = COMPUTECOSTMULTI(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly
J = 0;

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta
%               You should set J to the cost.

H = X * theta;
J = ((H - y).^2) / (2 * m);
J = sum(J);

% =========================================================================

end
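
The body is intentionally identical to computeCost: because it is fully vectorized, the same expression handles any number of features. An equivalent form writes the sum of squared residuals as an inner product, avoiding the intermediate per-example vector:

% Drop-in alternative for the three computation lines above
E = X * theta - y;        % residuals, m x 1
J = (E' * E) / (2 * m);   % same value as sum((E.^2) / (2*m))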

machine-learning-ex1/ex1/ex1.m

Lines changed: 135 additions & 0 deletions
@@ -0,0 +1,135 @@
%% Machine Learning Online Class - Exercise 1: Linear Regression

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  linear regression exercise. You will need to complete the following
%  functions in this exercise:
%
%     warmUpExercise.m
%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%
%  x refers to the population size in 10,000s
%  y refers to the profit in $10,000s
%

%% Initialization
clear ; close all; clc

%% ==================== Part 1: Basic Function ====================
% Complete warmUpExercise.m
fprintf('Running warmUpExercise ... \n');
fprintf('5x5 Identity Matrix: \n');
warmUpExercise()

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ======================= Part 2: Plotting =======================
fprintf('Plotting Data ...\n')
data = load('ex1data1.txt');
X = data(:, 1); y = data(:, 2);
m = length(y); % number of training examples

% Plot Data
% Note: You have to complete the code in plotData.m
plotData(X, y);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =================== Part 3: Cost and Gradient descent ===================

X = [ones(m, 1), data(:,1)]; % Add a column of ones to x
theta = zeros(2, 1); % initialize fitting parameters

% Some gradient descent settings
iterations = 1500;
alpha = 0.01;

fprintf('\nTesting the cost function ...\n')
% compute and display initial cost
J = computeCost(X, y, theta);
fprintf('With theta = [0 ; 0]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 32.07\n');

% further testing of the cost function
J = computeCost(X, y, [-1 ; 2]);
fprintf('\nWith theta = [-1 ; 2]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 54.24\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

fprintf('\nRunning Gradient Descent ...\n')
% run gradient descent
theta = gradientDescent(X, y, theta, alpha, iterations);

% print theta to screen
fprintf('Theta found by gradient descent:\n');
fprintf('%f\n', theta);
fprintf('Expected theta values (approx)\n');
fprintf(' -3.6303\n  1.1664\n\n');

% Plot the linear fit
hold on; % keep previous plot visible
plot(X(:,2), X*theta, '-')
legend('Training data', 'Linear regression')
hold off % don't overlay any more plots on this figure

% Predict values for population sizes of 35,000 and 70,000
predict1 = [1, 3.5] * theta;
fprintf('For population = 35,000, we predict a profit of %f\n',...
    predict1*10000);
predict2 = [1, 7] * theta;
fprintf('For population = 70,000, we predict a profit of %f\n',...
    predict2*10000);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Grid over which we will calculate J
theta0_vals = linspace(-10, 10, 100);
theta1_vals = linspace(-1, 4, 100);

% initialize J_vals to a matrix of 0's
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Fill out J_vals
for i = 1:length(theta0_vals)
    for j = 1:length(theta1_vals)
        t = [theta0_vals(i); theta1_vals(j)];
        J_vals(i,j) = computeCost(X, y, t);
    end
end

% Because of the way meshgrids work in the surf command, we need to
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';
% Surface plot
figure;
surf(theta0_vals, theta1_vals, J_vals)
xlabel('\theta_0'); ylabel('\theta_1');

% Contour plot
figure;
% Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);
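
gradientDescent.m is part of this commit, but its diff is not shown in this section. For ex1.m to reproduce theta of approximately [-3.6303; 1.1664], it has to implement the standard vectorized batch update theta := theta - (alpha/m) * X' * (X*theta - y). A minimal sketch, assuming the course's starter signature; this is not the committed code:

function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Sketch of vectorized batch gradient descent.
%   Signature matches the call in ex1.m; the body is an assumed,
%   standard implementation, not the code committed here.
m = length(y);
J_history = zeros(num_iters, 1);
for iter = 1:num_iters
    % Simultaneous update of all parameters from the full-batch gradient
    theta = theta - (alpha / m) * (X' * (X * theta - y));
    J_history(iter) = computeCost(X, y, theta); % track convergence
end
end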

machine-learning-ex1/ex1/ex1_multi.m

Lines changed: 160 additions & 0 deletions
@@ -0,0 +1,160 @@
%% Machine Learning Online Class
%  Exercise 1: Linear regression with multiple variables
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  linear regression exercise.
%
%  You will need to complete the following functions in this
%  exercise:
%
%     warmUpExercise.m
%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
%  For this part of the exercise, you will need to change some
%  parts of the code below for various experiments (e.g., changing
%  learning rates).
%

%% Initialization

%% ================ Part 1: Feature Normalization ================

%% Clear and Close Figures
clear ; close all; clc

fprintf('Loading data ...\n');

%% Load Data
data = load('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Print out some data points
fprintf('First 10 examples from the dataset: \n');
fprintf(' x = [%.0f %.0f], y = %.0f \n', [X(1:10,:) y(1:10,:)]');

fprintf('Program paused. Press enter to continue.\n');
pause;

% Scale features and set them to zero mean
fprintf('Normalizing Features ...\n');

[X, mu, sigma] = featureNormalize(X);

% Add intercept term to X
X = [ones(m, 1) X];


%% ================ Part 2: Gradient Descent ================

% ====================== YOUR CODE HERE ======================
% Instructions: We have provided you with the following starter
%               code that runs gradient descent with a particular
%               learning rate (alpha).
%
%               Your task is to first make sure that your functions -
%               computeCost and gradientDescent already work with
%               this starter code and support multiple variables.
%
%               After that, try running gradient descent with
%               different values of alpha and see which one gives
%               you the best result.
%
%               Finally, you should complete the code at the end
%               to predict the price of a 1650 sq-ft, 3 br house.
%
% Hint: By using the 'hold on' command, you can plot multiple
%       graphs on the same figure.
%
% Hint: At prediction, make sure you do the same feature normalization.
%

fprintf('Running gradient descent ...\n');

% Choose some alpha value
alpha = 0.5;
num_iters = 40;

% Init Theta and Run Gradient Descent
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters);

% Plot the convergence graph
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations');
ylabel('Cost J');

% Display gradient descent's result
fprintf('Theta computed from gradient descent: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% Recall that the first column of X is all-ones. Thus, it does
% not need to be normalized.
houseData = [1, (1650 - mu(1)) / sigma(1), (3 - mu(2)) / sigma(2)];
price = houseData * theta;
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using gradient descent):\n $%f\n'], price);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ================ Part 3: Normal Equations ================

fprintf('Solving with normal equations...\n');

% ====================== YOUR CODE HERE ======================
% Instructions: The following code computes the closed form
%               solution for linear regression using the normal
%               equations. You should complete the code in
%               normalEqn.m
%
%               After doing so, you should complete this code
%               to predict the price of a 1650 sq-ft, 3 br house.
%

%% Load Data
data = csvread('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Add intercept term to X
X = [ones(m, 1) X];

% Calculate the parameters from the normal equation
theta = normalEqn(X, y);

% Display normal equation's result
fprintf('Theta computed from the normal equations: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
houseData = [1, 1650, 3];
price = houseData * theta;
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using normal equations):\n $%f\n'], price);
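
Two more functions that ex1_multi.m calls, featureNormalize.m and normalEqn.m, are also not shown in this section. Minimal sketches consistent with how they are called above; the bodies are assumptions, not the committed code.

featureNormalize must return the transformed features along with mu and sigma, because the prediction step above reuses them to normalize the 1650 sq-ft, 3 br query the same way as the training data:

function [X_norm, mu, sigma] = featureNormalize(X)
%FEATURENORMALIZE Sketch: scale each column to zero mean and unit std dev.
mu = mean(X);                % 1 x n row of column means
sigma = std(X);              % 1 x n row of column standard deviations
X_norm = (X - mu) ./ sigma;  % broadcasting (Octave / MATLAB R2016b+)
end

normalEqn is the closed-form least-squares solution theta = inv(X'*X) * X' * y; using pinv keeps it well defined even when X'*X is singular:

function theta = normalEqn(X, y)
%NORMALEQN Sketch: closed-form solution of the normal equations.
theta = pinv(X' * X) * (X' * y);
end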
