Skip to content

Commit

Permalink
ex5 sol
Browse files Browse the repository at this point in the history
  • Loading branch information
szihs committed Feb 8, 2018
1 parent 5727715 commit b2e204a
Show file tree
Hide file tree
Showing 9 changed files with 18 additions and 5 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
token.mat
Binary file removed Assignments/machine-learning-ex1/ex1/token.mat
Binary file not shown.
Binary file removed Assignments/machine-learning-ex2/ex2/token.mat
Binary file not shown.
Binary file removed Assignments/machine-learning-ex3/ex3/token.mat
Binary file not shown.
Binary file removed Assignments/machine-learning-ex4/ex4/token.mat
Binary file not shown.
8 changes: 5 additions & 3 deletions Assignments/machine-learning-ex5/ex5/learningCurve.m
Original file line number Diff line number Diff line change
Expand Up @@ -53,9 +53,11 @@

% ---------------------- Sample Solution ----------------------




% Learning curve: for each training-set size i, fit theta on the first i
% examples (with regularization strength lambda), then record the
% UNregularized cost (lambda = 0, per the exercise spec) on both the
% i-example training subset and the full cross-validation set.
for i = 1:m
    theta_i = trainLinearReg(X(1:i, :), y(1:i, :), lambda);
    % Gradients are not needed here; discard them with ~.
    [error_train(i), ~] = linearRegCostFunction(X(1:i, :), y(1:i, :), theta_i, 0);
    [error_val(i),   ~] = linearRegCostFunction(Xval, yval, theta_i, 0);
end



Expand Down
3 changes: 3 additions & 0 deletions Assignments/machine-learning-ex5/ex5/linearRegCostFunction.m
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@
%


% Regularized linear regression cost and gradient.
% h: m x 1 vector of predictions for the current theta.
h = X * theta;

% Cost: squared-error term plus L2 penalty on theta(2:end) — the bias
% term theta(1) is never regularized. Note (h - y)' * (h - y) is already
% a scalar, so no sum() is needed around it.
J = ((h - y)' * (h - y)) / (2 * m) + lambda * sum(theta(2:end) .^ 2) / (2 * m);

% Gradient as a 1 x p row vector: data term plus the regularization term,
% with a leading 0 so the bias is not penalized. The second term is
% transposed so both addends are 1 x p.
grad = ((h - y)' * X) / m + lambda * [0; theta(2:end)]' / m;



Expand Down
4 changes: 3 additions & 1 deletion Assignments/machine-learning-ex5/ex5/polyFeatures.m
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
%
%


% Polynomial feature mapping: column j of X_poly holds X .^ j for
% j = 1..p. bsxfun broadcasts the column vector X against the row 1:p,
% replacing the explicit per-power loop with one vectorized call.
% NOTE(review): assumes X is a column vector, as the original loop did.
X_poly = bsxfun(@power, X, 1:p);



Expand Down
7 changes: 6 additions & 1 deletion Assignments/machine-learning-ex5/ex5/validationCurve.m
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,12 @@
% end
%
%

% Validation curve: train with each candidate regularization strength,
% then record the UNregularized (lambda = 0) cost on the training and
% cross-validation sets so the errors are directly comparable.
for i = 1:length(lambda_vec)
    lambda = lambda_vec(i);
    theta_i = trainLinearReg(X, y, lambda);
    % Gradients are not needed here; discard them with ~.
    [error_train(i), ~] = linearRegCostFunction(X, y, theta_i, 0);
    [error_val(i),   ~] = linearRegCostFunction(Xval, yval, theta_i, 0);
end



Expand Down

0 comments on commit b2e204a

Please sign in to comment.