# Module-4-Example-7.1.R
# https://www.r-bloggers.com/linear-regression-by-gradient-descent/
# generate random data in which y is a noisy function of x
x <- runif(1000, -5, 5)
y <- x + rnorm(1000) + 3
# Look at the scatterplot
plot(x, y)
# fit a linear model
res <- lm( y ~ x )
print(res)
# plot the data and the model
plot(x, y, col=rgb(0.2,0.4,0.6,0.4), main='Linear Regression')
abline(res, col='blue')
# squared error cost function
cost <- function(X, y, theta) {
  sum((X %*% theta - y)^2) / (2 * length(y))
}
# learning rate and iteration limit
alpha <- 0.01
num_iters <- 1000
# keep history
cost_history <- double(num_iters)
theta_history <- vector("list", num_iters)
# initialize coefficients
theta <- matrix(c(0,0), nrow=2)
# add a column of 1's for the intercept coefficient
X <- cbind(1, matrix(x))
AllXandY <- cbind(y, X)
# draw 10 random rows for a quick look at the data (not used in the fit below)
sampleData <- AllXandY[sample(nrow(AllXandY), 10), ]
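# Optional sanity check (not in the original script): with theta = c(0, 0) the
# squared-error cost reduces to sum(y^2) / (2 * length(y)), so the call below
# should print roughly that value.
print(cost(X, y, matrix(c(0, 0), nrow = 2)))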
# gradient descent
for (i in 1:num_iters) {
  # residuals of the current fit
  error <- (X %*% theta - y)
  # gradient of the 1/(2n) squared-error cost: t(X) %*% error / n
  delta <- t(X) %*% error / length(y)
  # step downhill along the negative gradient
  theta <- theta - alpha * delta
  # record the cost and coefficients for this iteration
  cost_history[i] <- cost(X, y, theta)
  print(cost_history[i])
  print(paste(i, ":", paste(round(theta, 4), collapse = ", ")))
  theta_history[[i]] <- theta
  # Sys.sleep(1)
}
print(theta)
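# Optional check (not in the original script): the gradient-descent estimate
# should be close to the closed-form coefficients returned by lm() above.
print(coef(res))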
# plot data and converging fit
plot(x,y, col=rgb(0.2,0.4,0.6,0.4), main='Linear regression by gradient descent')
for (i in c(1, 3, 6, 10, 14, seq(20, num_iters, by=10))) {
  abline(coef=theta_history[[i]], col=rgb(0.8, 0, 0, 0.3))
}
abline(coef=theta, col='blue')
plot(cost_history, type='l', col='blue', lwd=2, main='Cost function', ylab='cost', xlab='Iterations')
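# Optional cross-check (assumed addition, not part of the original script):
# the normal equations give the least-squares solution in closed form, which
# the gradient-descent theta should approach as num_iters grows.
theta_exact <- solve(t(X) %*% X, t(X) %*% y)
print(theta_exact)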