|
fprintf('Running gradient descent ...\n');

% Try several learning rates: start at alpha = 0.01 and triple it each run.
num_alphas = 10;                        % number of learning rates to try
alpha_histories = zeros(num_alphas, 1); % alpha used on each run
alpha = .01;
num_iters = 150;

% Run Gradient Descent num_alphas times with varying alphas, saving results.
% Row i of J_histories holds the full cost history (one value per iteration)
% for the i-th alpha.
J_histories = zeros(num_alphas, num_iters);
for i = 1:num_alphas
    % Reset theta so every run starts from the same initial point;
    % otherwise each run would warm-start from the previous run's result.
    theta = zeros(3, 1);
    % gradientDescentMulti returns [theta, J_history]; capture BOTH outputs —
    % a single-output assignment would grab theta (3x1), not the cost history.
    [theta, J_histories(i, :)] = gradientDescentMulti(X, y, theta, alpha, num_iters);
    alpha_histories(i) = alpha;
    alpha = alpha * 3;
end

% Plot the convergence graph: one cost-vs-iteration curve per alpha.
figure;
xlabel('Number of iterations');
ylabel('Cost J');
hold on;
for hist = 1:num_alphas
    % NOTE: do not introduce a local variable named 'y' here — it would
    % clobber the training-target vector used by gradientDescentMulti above.
    plot(1:num_iters, J_histories(hist, :), '-b', 'LineWidth', 2);
end
hold off;

% Display gradient descent's result
|
|
0 commit comments