# 多元线性回归函数

$$h(x_i) = b_0 + b_1 x_{i1} + b_2 x_{i2} + \cdots + b_p x_{ip}$$

$$h(x_i) = b_0 + b_1 x_{i1} + b_2 x_{i2} + \cdots + b_p x_{ip} + e_i$$

$$y_i = h(x_i) + e_i \quad \text{or} \quad e_i = y_i - h(x_i)$$

## Python代码实现

%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model, metrics

boston=datasets.load_boston(return_X_y=False)

X=boston.data
y=boston.target

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test=train_test_split(X, y, test_size=0.7, random_state=1)

reg = linear_model.LinearRegression()
reg.fit(X_train, y_train)
print('Coefficients:\n', reg.coef_)
print('Variance score: {}'.format(reg.score(X_test, y_test)))
plt.style.use('fivethirtyeight')
plt.scatter(reg.predict(X_train), reg.predict(X_train) - y_train, color = "green", s = 10, label = 'Train data')
plt.scatter(reg.predict(X_test), reg.predict(X_test) - y_test, color = "blue", s = 10, label = 'Test data')
plt.hlines(y = 0, xmin = 0, xmax = 50, linewidth = 2)
plt.legend(loc = 'upper right')
plt.title("Residual errors")
plt.show()

Coefficients:
[-1.16358797e-01 6.44549228e-02 1.65416147e-01 1.45101654e+00 -1.77862563e+01
2.80392779e+00 4.61905315e-02 -1.13518865e+00 3.31725870e-01 -1.01196059e-02
-9.94812678e-01 9.18522056e-03 -7.92395217e-01]
Variance score: 0.709454060230326

## 视频推荐

Python机器学习 - 02-线性模型