
Linear Regression
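The class below fits a linear model by minimizing the usual squared-error objective

    J(\theta) = \frac{1}{2n} \lVert X\theta - y \rVert^{2}

using batch gradient descent, which repeatedly applies the update

    \theta \leftarrow \theta - \frac{\alpha}{n} X^{\top} (X\theta - y)

where \alpha is the learning rate and n is the number of training instances.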

import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt

class LinearRegression:

    def __init__(self, init_theta=None, alpha=0.01, n_iter=100):
        '''
        Constructor
        '''
        self.alpha = alpha
        self.n_iter = n_iter
        self.theta = init_theta
        self.JHist = None

    def gradientDescent(self, X, y, theta):
        '''
        Fits the model via gradient descent
        Arguments:
            X is a n-by-d numpy matrix
            y is an n-dimensional numpy vector
            theta is a d-dimensional numpy vector
        Returns:
            the final theta found by gradient descent
        '''
        n, d = X.shape
        self.JHist = []
        for i in range(self.n_iter):
            self.JHist.append((self.computeCost(X, y, theta), theta))
            print("Iteration: ", i + 1, " Cost: ", self.JHist[i][0], " Theta: ", theta)
            # Batch gradient descent update: theta <- theta - (alpha/n) * X^T (X*theta - y)
            theta = theta - self.alpha / n * np.dot(X.T, (X.dot(theta) - y))

        return theta

    def computeCost(self, X, y, theta):
        '''
        Computes the objective function
        Arguments:
            X is a n-by-d numpy matrix
            y is an n-dimensional numpy vector
            theta is a d-dimensional numpy vector
        Returns:
            a scalar value of the cost
            ** make certain you don't return a matrix with just one value! **
        '''
        # Squared-error objective: J(theta) = 1/(2n) * (X*theta - y)^T (X*theta - y)
        n, d = X.shape
        residual = np.dot(X, theta) - y
        cost = np.dot(residual.T, residual).item() / (2 * n)
        return cost

    def fit(self, X, y):
        '''
        Trains the model
        Arguments:
            X is a n-by-d numpy matrix
            y is an n-dimensional numpy vector
        '''
        n, d = X.shape
        if self.theta is None:
            self.theta = np.matrix(np.zeros((d, 1)))
        self.theta = self.gradientDescent(X, y, self.theta)

    def predict(self, X):
        '''
        Uses the model to predict values for each instance in X
        Arguments:
            X is a n-by-d numpy matrix
        Returns:
            an n-dimensional numpy vector of the predictions
        '''
        # Predictions are simply X * theta
        return np.dot(X, self.theta)
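
A minimal usage sketch is shown below. It assumes the code is appended to the same file as the class above (so numpy is already imported); the synthetic data and hyperparameter values are illustrative assumptions, not part of the assignment files.

    if __name__ == "__main__":
        # Illustrative synthetic data for y = 1 + 2x (assumed, not from the assignment)
        n = 50
        x = np.linspace(0, 1, n)
        X = np.matrix(np.column_stack([np.ones(n), x]))   # n-by-2 design matrix with a bias column
        y = np.matrix(1.0 + 2.0 * x).T                    # n-by-1 column of targets

        model = LinearRegression(alpha=0.5, n_iter=500)
        model.fit(X, y)
        print(model.theta)            # should approach [[1.0], [2.0]]
        print(model.predict(X[:5]))   # predictions for the first five instances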
Results of test_linreg_univariate.py
Results of test_linreg_multivariate.py
