[python] Multiple Regression

pandas

statsmodels

https://www.statsmodels.org/stable/regression.html
Besides OLS (ordinary least squares), which the code below uses, statsmodels also provides:
OLS(endog[, exog, missing, hasconst]) A simple ordinary least squares model.
GLS(endog, exog[, sigma, missing, hasconst]) Generalized least squares model with a general covariance structure.
WLS(endog, exog[, weights, missing, hasconst]) A regression model with diagonal but non-identity covariance structure.
GLSAR(endog[, exog, rho, missing]) A regression model with an AR(p) covariance structure.
yule_walker(X[, order, method, df, inv, demean]) Estimate AR(p) parameters from a sequence X using the Yule-Walker equations.
QuantReg(endog, exog, **kwargs) Quantile Regression
RecursiveLS(endog, exog, **kwargs) Recursive least squares
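
As a quick illustration, here is a minimal sketch (synthetic data; all values hypothetical) fitting a few of these estimators:

import numpy as np
import statsmodels.api as sm

rng = np.random.default_rng(0)
X = sm.add_constant(rng.normal(size=(100, 2)))   # intercept column + 2 features
y = X @ np.array([1.0, 2.0, -0.5]) + rng.normal(size=100)

ols_fit = sm.OLS(y, X).fit()                        # ordinary least squares
wls_fit = sm.WLS(y, X, weights=np.ones(100)).fit()  # per-observation weights (all 1 here, so identical to OLS)
quant_fit = sm.QuantReg(y, X).fit(q=0.5)            # median (quantile) regression
print(ols_fit.params, wls_fit.params, quant_fit.params, sep='\n')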

import pandas as pd
import statsmodels.api as sm
import pylab as pl
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
from sklearn import preprocessing
from sklearn.metrics import r2_score
from sklearn.metrics import mean_squared_error
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import sys

df = pd.read_csv("t_traindata000")  # if the file has no header row, pass header=None here
df.columns = ['mesh500mid', 'season', 'period', 'dayflag', 'pop']
print(df.head())
# drop_first=True avoids multicollinearity: the dropped level becomes the baseline (see the toy example after this block)
season_dummy = pd.get_dummies(df['season'], prefix='season', drop_first=True)
period_dummy = pd.get_dummies(df['period'], prefix='period', drop_first=True)
cols_to_keep = ['pop', 'mesh500mid', 'dayflag']
data = df[cols_to_keep].join(season_dummy.loc[:, 'season_1':]).join(period_dummy.loc[:, 'period_1':])
print(data.head())
#data['intercept'] = 1.0

train_cols = data.columns[2:]  # feature columns: everything except pop and mesh500mid
#logit = sm.Logit(data['pop'], data[train_cols])
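
To see what drop_first does, a toy example (values hypothetical):

import pandas as pd

s = pd.Series(['spring', 'summer', 'autumn', 'spring'])
print(pd.get_dummies(s, prefix='season'))                   # one indicator column per level
print(pd.get_dummies(s, prefix='season', drop_first=True))  # first level dropped; it becomes the baseline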
 

#----------------
data_cur = data[data['mesh500mid'] == 533941604]  # 533944882
X = data_cur[train_cols]
y = data_cur['pop']
#X = preprocessing.scale(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=100)
print('X_train.shape={}\n y_train.shape={}\n X_test.shape={}\n y_test.shape={}'.format(X_train.shape, y_train.shape, X_test.shape, y_test.shape))

## model1: sklearn
model1 = LinearRegression()
model1.fit(X_train, y_train) #train
#print('Model score: %f' %(model1.score(X_test,y_test)))

## model2: statsmodels (unlike sklearn, sm.OLS does not add an intercept automatically)
X_train_sm = sm.add_constant(X_train)
X_test_sm = sm.add_constant(X_test)
model2 = sm.OLS(y_train, X_train_sm).fit()
#print(model2.summary())

model = model2

## Prediction
y_test_pred = model.predict(X_test_sm)
y_train_pred = model.predict(X_train_sm)

## Evaluation
# MSE
print('MSE train: %.3f, test: %.3f' % (mean_squared_error(y_train, y_train_pred), mean_squared_error(y_test, y_test_pred)))
# R^2 (coefficient of determination) regression score function.
print('R^2 train: %.3f, test: %.3f' % (r2_score(y_train, y_train_pred), r2_score(y_test, y_test_pred)))

Note: if the model is fit without the constant column, the R-squared reported by model2.summary() comes out larger than the value computed with r2_score; without an intercept, statsmodels reports the uncentered R-squared. See:
https://stats.stackexchange.com/questions/267325/why-does-statsmodels-api-ols-over-report-the-r-squared-value
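
A minimal sketch of the effect (synthetic data):

import numpy as np
import statsmodels.api as sm
from sklearn.metrics import r2_score

rng = np.random.default_rng(1)
x = rng.normal(loc=5, size=50)
y = 2 * x + rng.normal(size=50)

fit = sm.OLS(y, x).fit()            # no intercept column
print(fit.rsquared)                 # uncentered R^2: compares against 0, so it looks inflated
print(r2_score(y, fit.predict(x)))  # centered R^2: compares against the mean, smaller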

Adding higher-order polynomial terms

Author: 搬磚小工053
Source: CSDN
Original: https://blog.csdn.net/SA14023053/article/details/51703204

import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
def runplt():
    plt.figure()
    plt.title('diameter-cost curve')
    plt.xlabel('diameter')
    plt.ylabel('cost')
    plt.axis([0, 25, 0, 25])
    plt.grid(True)
    return plt
X_train = [[6], [8], [10], [14], [18]]
y_train = [[7], [9], [13], [17.5], [18]]
X_test = [[6], [8], [11], [16]]
y_test = [[8], [12], [15], [18]]
regressor = LinearRegression()
regressor.fit(X_train, y_train)
xx = np.linspace(0, 26, 100)
yy = regressor.predict(xx.reshape(xx.shape[0], 1))
plt = runplt()
plt.plot(X_train, y_train, 'k.')
plt.plot(xx, yy)

quadratic_featurizer = PolynomialFeatures(degree=2)
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
X_test_quadratic = quadratic_featurizer.transform(X_test)
regressor_quadratic = LinearRegression()
regressor_quadratic.fit(X_train_quadratic, y_train)
xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1))
plt.plot(xx, regressor_quadratic.predict(xx_quadratic), 'r-')
plt.show()
print(X_train)
print(X_train_quadratic)
print(X_test)
print(X_test_quadratic)
print('1 r-squared', regressor.score(X_test, y_test))
print('2 r-squared', regressor_quadratic.score(X_test_quadratic, y_test))

[Figure: training points (black dots) with the linear fit and the red quadratic curve]

The following two approaches give the same result (a check appears after the second snippet):

quadratic_featurizer = PolynomialFeatures(degree=2)
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
X_test_quadratic = quadratic_featurizer.transform(X_test)
# fit_transform learns the feature mapping on the training set;
# transform then applies that same mapping to the test set
model = LinearRegression()
model.fit(X_train_quadratic, y_train)
y_test_pred = model.predict(X_test_quadratic)
y_train_pred = model.predict(X_train_quadratic)

from sklearn.pipeline import make_pipeline
model = make_pipeline(PolynomialFeatures(2), LinearRegression())
model.fit(X_train, y_train)
y_test_pred = model.predict(X_test)
y_train_pred = model.predict(X_train)
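
A self-contained check (reusing the toy data from the example above) that the two paths agree:

import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures

X_train = [[6], [8], [10], [14], [18]]
y_train = [7, 9, 13, 17.5, 18]
X_test = [[6], [8], [11], [16]]

# path 1: explicit featurizer
feat = PolynomialFeatures(degree=2)
m1 = LinearRegression().fit(feat.fit_transform(X_train), y_train)
p1 = m1.predict(feat.transform(X_test))

# path 2: pipeline bundles the same two steps
m2 = make_pipeline(PolynomialFeatures(2), LinearRegression()).fit(X_train, y_train)
p2 = m2.predict(X_test)

print(np.allclose(p1, p2))  # True: identical predictions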

A version with more detailed comments:

http://www.shareditor.com/blogshow?blogId=56

# coding:utf-8

import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

plt.figure() # create the figure object
plt.title('single variable') # figure title
plt.xlabel('x') # x-axis label
plt.ylabel('y') # y-axis label
plt.axis([30, 400, 100, 400])
plt.grid(True) # draw grid lines

X = [[50],[100],[150],[200],[250],[300]]
y = [[150],[200],[250],[280],[310],[330]]
X_test = [[250],[300]] # held out for the final evaluation
y_test = [[310],[330]] # held out for the final evaluation
plt.plot(X, y, 'k.')

model = LinearRegression()
model.fit(X, y)
X2 = [[30], [400]]
y2 = model.predict(X2)
plt.plot(X2, y2, 'g-')

xx = np.linspace(30, 400, 100) # x points used to draw the fitted curve
quadratic_featurizer = PolynomialFeatures(degree=2) # degree-2 polynomial feature transformer
X_train_quadratic = quadratic_featurizer.fit_transform(X) # transform the training X values into quadratic features
xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1)) # apply the fitted transformer to the plotting points
regressor_quadratic = LinearRegression() # create a linear regression instance
regressor_quadratic.fit(X_train_quadratic, y) # train on the polynomially transformed x values
plt.plot(xx, regressor_quadratic.predict(xx_quadratic), 'r-') # draw the fitted curve

print('simple linear regression r-squared', model.score(X_test, y_test))
X_test_quadratic = quadratic_featurizer.transform(X_test)
print('quadratic regression     r-squared', regressor_quadratic.score(X_test_quadratic, y_test))

plt.show() # display the figure