-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathLinearRegression_House.m
More file actions
86 lines (69 loc) · 1.49 KB
/
LinearRegression_House.m
File metadata and controls
86 lines (69 loc) · 1.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
%housing with linear regression
% Load the raw train/test matrices and prepend a bias column of ones
% so theta(1) acts as the intercept term.
TRO = load('housing_train.txt');
TEO = load('housing_test.txt');
row1 = size(TRO, 1);            % number of training examples
row2 = size(TEO, 1);            % number of test examples
TRAIN = [ones(row1, 1), TRO];   % bias column + raw features/target
TEST  = [ones(row2, 1), TEO];
column = size(TRAIN, 2);        % last column is the target value
e = exp(1);                     % natural base (referenced later in the script)
%normalize
% Min-max scale each feature column (2..column-1) to [0,1]:
%   x -> (x - min(x)) / max(x - min(x))  i.e. (x - min) / range.
% Column 1 (bias) and the last column (target) are left untouched.
% The train-set statistics are stored in NORMAL and reused on TEST so
% the test set is scaled with the SAME parameters (no test-set leakage).
NORMAL = zeros(column - 1, 2); %column 1 = min, column 2 = range (max - min)
for m = 2:column - 1
    mi = min(TRAIN(:,m));
    TRAIN(:,m) = TRAIN(:,m) - mi;    % shift so the column minimum is 0
    ma = max(TRAIN(:,m));            % range of the original column
    % NOTE(review): a constant column would give ma == 0 and divide by
    % zero here, exactly as in the original code — confirm data has no
    % constant features.
    TRAIN(:,m) = TRAIN(:,m) / ma;
    NORMAL(m,1) = mi;
    NORMAL(m,2) = ma;
end
for m = 2:column - 1
    % Apply the stored train-set min/range to the test features.
    TEST(:,m) = (TEST(:,m) - NORMAL(m,1)) / NORMAL(m,2);
end
%training
% Fit theta by stochastic gradient descent on squared error:
%   theta <- theta - lam * (x_i*theta - y_i) * x_i'
% X holds the bias column plus normalized features; Y is the target column.
X = TRAIN(:, 1:column-1);
Y = TRAIN(:, column);
theta = zeros(column-1, 1);
lam = 0.001;            % learning rate
for n = 1:1000          % epochs
    for i = 1:row1
        % Predict with the CURRENT theta for each sample. (The original
        % reused a prediction vector computed once per epoch, so updates
        % within an epoch were based on stale residuals; it also carried
        % dead leftover sigmoid code from a logistic-regression variant.)
        h = X(i,:) * theta;
        theta = theta - lam * (h - Y(i,1)) * X(i,:)';
    end
end
% Mean squared error on the training set with the final theta.
% (Renamed from `sum`, which shadowed the MATLAB builtin.)
avg = mean((X * theta - Y).^2);
fprintf(1,'TRAIN MSE: %g\n',avg);
%testing
% Evaluate the learned theta on the (train-normalized) test set.
X1 = TEST(:, 1:column-1);
Y1 = TEST(:, column);
H = X1 * theta;                 % predictions
avg2 = mean((H - Y1).^2);       % test mean squared error
% The original wrote this to fid 2 (stderr) while the train MSE went to
% fid 1 (stdout) — almost certainly a typo; report both on stdout.
fprintf(1,'TEST MSE: %g\n',avg2);