multiple_linear_regression_self.py
# Multiple linear regression

# Importing libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Importing the dataset
data = pd.read_csv("50_Startups.csv")
X = data.iloc[:, 0:4].values
y = data.iloc[:, 4].values

# Encoding the categorical (string) column.
# OneHotEncoder's categorical_features argument was removed from scikit-learn;
# ColumnTransformer is now the way to one-hot encode a single column while
# passing the remaining columns through unchanged.
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder
ct = ColumnTransformer(
    transformers=[("encoder", OneHotEncoder(), [3])],
    remainder="passthrough",
    sparse_threshold=0.0,  # always return a dense array
)
X = ct.fit_transform(X).astype(float)  # dummy columns first, then the numeric features

# Avoiding the dummy variable trap: drop one dummy column
X = X[:, 1:]
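
# A pandas-only alternative for the encoding step above (a sketch, not part of the
# original script): pd.get_dummies one-hot encodes the categorical column and drops
# the first dummy in a single call. It assumes the categorical column is named
# "State", as in the usual 50_Startups.csv; note that get_dummies appends the dummy
# columns after the numeric ones, so the column order differs from X above.
if "State" in data.columns:
    X_alt = pd.get_dummies(data.iloc[:, 0:4], columns=["State"], drop_first=True)
    X_alt = X_alt.values.astype(float)  # same information as X, different column order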

# Creating the training and test sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)  # fixed seed so the split is reproducible

# Fitting the model to the training set
from sklearn.linear_model import LinearRegression
regression = LinearRegression()
regression.fit(X_train, y_train)
print(regression.score(X_train, y_train))  # R^2 of the model on the training set

# Predicting the test-set results
y_predict = regression.predict(X_test)
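
# A quick generalization check (sketch): R^2 on the held-out test set, plus the first
# few predictions printed next to the actual test-set profits for a visual comparison.
print("Test-set R^2:", regression.score(X_test, y_test))
print(np.column_stack((y_predict[:5], y_test[:5])))  # left: predicted, right: actual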

# Optimizing the model using backward elimination
import statsmodels.api as sm  # the array-based OLS(endog, exog) interface lives in statsmodels.api
X = np.append(arr=np.ones((X.shape[0], 1), dtype=int), values=X, axis=1)  # prepend the intercept column
# Backward elimination by hand: refit after dropping the predictor with the highest p-value
X_opt = X[:, [0, 1, 2, 3, 4, 5]]
regressor_ols = sm.OLS(endog=y, exog=X_opt).fit()
print(regressor_ols.summary())
X_opt = X[:, [0, 1, 3, 4, 5]]
regressor_ols = sm.OLS(endog=y, exog=X_opt).fit()
print(regressor_ols.summary())
X_opt = X[:, [0, 3, 4, 5]]
regressor_ols = sm.OLS(endog=y, exog=X_opt).fit()
print(regressor_ols.summary())
X_opt = X[:, [0, 3, 5]]
regressor_ols = sm.OLS(endog=y, exog=X_opt).fit()
print(regressor_ols.summary())
print(regressor_ols.rsquared)  # statsmodels results have no .score(); rsquared gives R^2
X_opt = X[:, [0, 5]]
regressor_ols = sm.OLS(endog=y, exog=X_opt).fit()
print(regressor_ols.summary())
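
# The manual elimination above can also be automated (a sketch, not part of the
# original script): repeatedly fit OLS, find the predictor with the highest p-value,
# and drop it while that p-value exceeds a chosen significance level (0.05 here is
# an assumed threshold, not something dictated by the data).
significance_level = 0.05
columns = list(range(X.shape[1]))  # start from all columns, including the intercept
while len(columns) > 0:
    model = sm.OLS(endog=y, exog=X[:, columns]).fit()
    if model.pvalues.max() <= significance_level:
        break
    del columns[int(np.argmax(model.pvalues))]  # drop the least significant predictor
print("Columns kept by automated backward elimination:", columns)
print(model.summary())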