%% Main.m — Logistic-regression driver script.
% Trains logistic regression on data.txt and evaluates on datatest.txt.
% Expects costFunction.m and predict.m on the MATLAB path, and both data
% files in the current directory. In each file, column 1 is the label y
% and the remaining columns are the features X.
clear; close all; clc

%% Load Data
data = load('data.txt');
X = data(:, 2:end);          % features (original files: columns 2..101)
y = data(:, 1);              % labels
data_test = load('datatest.txt');
X_test = data_test(:, 2:end);
y_test = data_test(:, 1);

%% ============ Compute Cost and Gradient ============
[m, n] = size(X);            % m training examples, n features
[m1, ~] = size(X_test);      % m1 test examples (feature count unused)

% Prepend an intercept (bias) column of ones to both sets.
X = [ones(m, 1) X];
X_test = [ones(m1, 1) X_test];

% Start optimization from the zero vector (n features + intercept).
initial_theta = zeros(n + 1, 1);

% Compute and display initial cost and gradient
[cost, grad] = costFunction(initial_theta, X, y);

fprintf('Cost at initial theta (zeros): %f\n', cost);
% fprintf('Gradient at initial theta (zeros): \n');
% fprintf(' %f \n', grad);

%% ============= Optimizing using fminunc =============
% 'GradObj','on' tells fminunc that costFunction returns the analytic
% gradient as its second output, so it is used instead of finite
% differences. TolFun tightened to 1e-10 for a precise minimum.
options = optimset('GradObj', 'on', 'MaxIter', 400, 'TolFun', 1e-10);
[theta, cost] = ...
    fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);

fprintf('Cost at theta found by fminunc: %f\n', cost);
% fprintf('theta: \n');
% fprintf(' %f \n', theta);

%% ============== Predict and Accuracies ==============
% Accuracy = fraction of predicted labels matching the true labels, in %.
p = predict(theta, X);
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
ptest = predict(theta, X_test);
fprintf('Testing Accuracy: %f\n', mean(double(ptest == y_test)) * 100);