import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

mtcars = pd.read_csv("mtcars.csv", index_col=0)
mtcars
x = mtcars["hp"]
y = mtcars["mpg"]
plt.plot(x, y, "*")
plt.grid(True)
plt.xlabel("Horse Power")
plt.ylabel("Miles per Gallon")
plt.show()
def standardize(x):
    return (x - x.mean()) / x.std(), x.mean(), x.std()

x, muX, stdX = standardize(x)
y, muY, stdY = standardize(y)
if len(x.shape) == 1:
    num_var = 1
else:
    num_var = x.shape[1]
beta0 = np.random.rand()   # random initial intercept
beta1 = np.random.rand()   # random initial slope

def predict(x, beta0, beta1):
    return beta0 + beta1 * x

def loss(y, ypred):
    # half mean squared error
    return np.mean((y - ypred) ** 2) / 2

def gradient(y, ypred, x):
    # partial derivatives of the loss with respect to beta0 and beta1
    grad_beta0 = np.mean((ypred - y) * 1)
    grad_beta1 = np.mean((ypred - y) * x)
    return grad_beta0, grad_beta1

def update_param(beta0, beta1, grad_beta0, grad_beta1, alpha):
    # move each parameter one step against its gradient
    new_beta0 = beta0 - alpha * grad_beta0
    new_beta1 = beta1 - alpha * grad_beta1
    return new_beta0, new_beta1
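For reference (this derivation is not in the original file, but it is what the gradient code implements): with n samples, the loss above is

J(\beta_0, \beta_1) = \frac{1}{2n} \sum_{i=1}^{n} \bigl(\beta_0 + \beta_1 x_i - y_i\bigr)^2

and its partial derivatives are

\frac{\partial J}{\partial \beta_0} = \frac{1}{n} \sum_{i=1}^{n} (\hat{y}_i - y_i), \qquad
\frac{\partial J}{\partial \beta_1} = \frac{1}{n} \sum_{i=1}^{n} (\hat{y}_i - y_i)\, x_i

which correspond to np.mean((ypred-y)*1) and np.mean((ypred-y)*x) in gradient().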
num_iter = 1000
alpha = 0.01
J_list = []
print(beta0)   # parameters before training
print(beta1)
for i in range(num_iter):
    ypred = predict(x, beta0, beta1)
    J = loss(y, ypred)
    J_list.append(J)
    grad_beta0, grad_beta1 = gradient(y, ypred, x)
    beta0, beta1 = update_param(beta0, beta1, grad_beta0, grad_beta1, alpha)
print(beta0)   # parameters after training
print(beta1)
plt.plot(J_list)
plt.show()
plt.plot(x, y, "*")
plt.grid(True)
plt.xlabel("Horse Power")
plt.ylabel("Miles per Gallon")
plt.plot(x, ypred, "-ro")
plt.show()
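A side note, not in the original file: because the model was fit on standardized data, the fitted line can be mapped back to the original hp/mpg scale. A minimal sketch, assuming beta0, beta1, muX, stdX, muY, stdY hold the values computed above:

# From y_std = beta0 + beta1 * x_std with x_std = (hp - muX) / stdX and
# y_std = (mpg - muY) / stdY, solving for mpg gives the original-scale line:
slope_orig = beta1 * stdY / stdX
intercept_orig = muY + stdY * beta0 - slope_orig * muX
print("mpg =", intercept_orig, "+", slope_orig, "* hp")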
x = mtcars[["hp", "disp"]]
y = mtcars["mpg"]
x.head()
def standardize(x):
    return (x - x.mean()) / x.std(), x.mean(), x.std()

x, muX, stdX = standardize(x)
y, muY, stdY = standardize(y)

if len(x.shape) == 1:
    num_var = 1
else:
    num_var = x.shape[1]

beta0 = np.random.rand()
beta = np.random.rand(num_var)

def predict(x, beta0, beta):
    return beta0 + beta[0] * x.iloc[:, 0] + beta[1] * x.iloc[:, 1]  # np.multiply, sum

def loss(y, ypred):
    return np.mean((y - ypred) ** 2) / 2

def gradient(y, ypred, x, num_var):
    grad_beta0 = np.mean((ypred - y) * 1)
    grad_beta = np.zeros(num_var)
    grad_beta[0] = np.mean((ypred - y) * x.iloc[:, 0])
    grad_beta[1] = np.mean((ypred - y) * x.iloc[:, 1])
    return grad_beta0, grad_beta

def update_param(beta0, beta, grad_beta0, grad_beta, alpha):
    new_beta0 = beta0 - alpha * grad_beta0
    new_beta = np.zeros(len(beta))
    new_beta[0] = beta[0] - alpha * grad_beta[0]
    new_beta[1] = beta[1] - alpha * grad_beta[1]
    return new_beta0, new_beta
num_iter = 2000
alpha = 0.01
J_list = []
for i in range(num_iter):
    ypred = predict(x, beta0, beta)
    J = loss(y, ypred)
    J_list.append(J)
    grad_beta0, grad_beta = gradient(y, ypred, x, num_var)
    beta0, beta = update_param(beta0, beta, grad_beta0, grad_beta, alpha)
print(beta0)
print(beta)
plt.plot(J_list)
plt.show()
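As a sanity check, not part of the original file, the gradient-descent result can be compared against the closed-form least-squares solution on the same standardized data. A minimal sketch using NumPy, assuming x, y, beta0, and beta are the values from the cells above:

# Closed-form least squares for comparison: first column is the intercept.
X = np.column_stack([np.ones(len(y)), np.asarray(x)])
coef, *_ = np.linalg.lstsq(X, np.asarray(y), rcond=None)
print(coef)  # should be close to [beta0, beta[0], beta[1]] after convergence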
In the Python file above, it is shown how to find the values of the parameters of linear regression for the mtcars data in the case of one and two independent variables (input variables) using the Gradient Descent algorithm. You are asked to make the algorithm work for k variables: instead of using one piece of code for 1 variable and a separate piece of code for 2 variables, a single piece of code should perform the necessary calculations in both cases (1 and 2 are given here only as examples). You can prepare by making the necessary changes to the code above.
Information on the Gradient Descent algorithm can be found here:
https://machinelearningmastery.com/gradient-descent-for-machine-learning/
Answer:
The code below may be useful; it is in C++ and should be helpful as a starting point.
// Assumes b[3] (coefficients), x[10][3] (inputs), y[10] (targets), alpha,
// output and prediction are declared elsewhere, and <cmath>/<iostream> are included.
int k = 0;
for (int j = 0; j < 100; j++) {   // outer loop added to repeat passes until the values converge
    k = 0;
    while (k < 10) {
        output = 0;
        for (int i = 0; i < 3; i++)
            output += b[i] * x[k][i];
        prediction = 1 / (1 + exp(-output));   // sigmoid of the linear combination
        for (int i = 0; i < 3; i++) {
            b[i] = b[i] + alpha * (y[k] - prediction) * prediction * (1 - prediction) * x[k][i];
            cout << b[i] << "\n";
        }
        k++;
    }
}
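The C++ snippet above uses the logistic-regression update from the linked tutorial. For the question as asked (linear regression with k input variables), one possible vectorized NumPy version of the Python code above is sketched below. This is a hedged adaptation, not a definitive solution; it assumes x is an (n, k) array and y an (n,) array, both already standardized:

import numpy as np

def predict(x, beta0, beta):
    return beta0 + x @ beta                     # works for any number of columns k

def loss(y, ypred):
    return np.mean((y - ypred) ** 2) / 2        # half mean squared error

def gradient(y, ypred, x):
    err = ypred - y
    grad_beta0 = np.mean(err)
    grad_beta = x.T @ err / len(y)              # one partial derivative per column
    return grad_beta0, grad_beta

def update_param(beta0, beta, grad_beta0, grad_beta, alpha):
    return beta0 - alpha * grad_beta0, beta - alpha * grad_beta

# Usage sketch: the same training loop as before, now independent of k.
# For a single variable, reshape first: x_arr = np.asarray(x).reshape(-1, 1)
x_arr = np.asarray(x)
y_arr = np.asarray(y)
beta0 = np.random.rand()
beta = np.random.rand(x_arr.shape[1])
for i in range(2000):
    ypred = predict(x_arr, beta0, beta)
    grad_beta0, grad_beta = gradient(y_arr, ypred, x_arr)
    beta0, beta = update_param(beta0, beta, grad_beta0, grad_beta, alpha=0.01)
print(beta0, beta)

Using x @ beta and x.T @ err removes the per-column code, so the same loop handles k = 1, 2, or more without change.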