# 线性回归数学推导及Python实现

a、具体推导如下图所示：

b、Python实现

~矩阵解法

c、使用梯度下降对线性模型进行求解，具体代码如下所示

~梯度下降解法

def cost(theta0, theta1, theta2, x, y):
    """Mean-squared-error cost for a 2-feature linear model.

    Hypothesis: h = theta0 + theta1 * x[i][0] + theta2 * x[i][1].

    Args:
        theta0, theta1, theta2: model parameters (intercept + two slopes).
        x: sequence of samples, each indexable as x[i][0], x[i][1].
        y: sequence of target values, same length as x.

    Returns:
        J = sum((h - y)^2) / (2 * m), the standard 1/(2m) MSE cost.
    """
    # Original text lost the initializer ("J=") and the first index
    # ("x[i][]"); restored as J = 0 and x[i][0].
    J = 0
    m = len(x)
    for i in range(m):
        h = theta0 + theta1 * x[i][0] + theta2 * x[i][1]
        J += (h - y[i]) ** 2
    J /= (2 * m)
    return J

def part_theta0(theta0, theta1, theta2, x, y):
    """Partial derivative of the cost with respect to theta0 (the intercept).

    Args:
        theta0, theta1, theta2: current parameter values.
        x: numpy array of shape (m, 2) — two feature columns.
        y: numpy array of shape (m,) — targets.

    Returns:
        dJ/dtheta0 = sum(h - y) / m (vectorized over all samples).
    """
    # Restored missing indices ("x[:,]" -> x[:, 0], "x.shape[]" -> x.shape[0])
    # and fixed the "theat1" typo in the parameter name.
    h = theta0 + theta1 * x[:, 0] + theta2 * x[:, 1]
    diff = h - y
    partial = diff.sum() / x.shape[0]
    return partial

def part_theta1(theta0, theta1, theta2, x, y):
    """Partial derivative of the cost with respect to theta1 (first slope).

    Args:
        theta0, theta1, theta2: current parameter values.
        x: numpy array of shape (m, 2).
        y: numpy array of shape (m,).

    Returns:
        dJ/dtheta1 = sum((h - y) * x[:, 0]) / m.
    """
    # Restored missing indices and fixed the "theat1" typo, matching
    # part_theta0's conventions.
    h = theta0 + theta1 * x[:, 0] + theta2 * x[:, 1]
    diff = (h - y) * x[:, 0]
    partial = diff.sum() / x.shape[0]
    return partial

def part_theta2(theta0, theta1, theta2, x, y):
    """Partial derivative of the cost with respect to theta2 (second slope).

    Args:
        theta0, theta1, theta2: current parameter values.
        x: numpy array of shape (m, 2).
        y: numpy array of shape (m,).

    Returns:
        dJ/dtheta2 = sum((h - y) * x[:, 1]) / m.
    """
    # Restored missing indices and fixed the "theat1" typo, matching
    # part_theta0's conventions.
    h = theta0 + theta1 * x[:, 0] + theta2 * x[:, 1]
    diff = (h - y) * x[:, 1]
    partial = diff.sum() / x.shape[0]
    return partial

def gradient_descent(data_x, data_y, theta0=0.0, theta1=0.0, theta2=0.0,
                     aph=0.0001, wucha=0.0000001, maxer=50000):
    """Fit theta0..theta2 by batch gradient descent on cost().

    The original blog text had a bare `return` at the end, so this loop was
    clearly meant to live inside a function; it is reconstructed as one.
    Stops when the cost change falls below `wucha` or after `maxer` steps.

    Args:
        data_x: numpy array of shape (m, 2) — training features.
        data_y: numpy array of shape (m,) — training targets.
        theta0, theta1, theta2: initial parameter values.
        aph: learning rate (undefined in the original; default is a
             conservative guess — tune for your data).
        wucha: convergence tolerance on the cost change ("误差" = error).
        maxer: maximum number of iterations (truncated while-condition
               restored as `counter < maxer`).

    Returns:
        (theta0, theta1, theta2, counter) — fitted parameters and the
        number of iterations actually performed.
    """
    counter = 0  # restored missing initializer ("counter=")
    c = cost(theta0, theta1, theta2, data_x, data_y)
    costs = [c]
    c1 = c + 10  # force at least one iteration
    theta0s = [theta0]
    theta1s = [theta1]
    theta2s = [theta2]
    while (np.abs(c - c1) > wucha) and (counter < maxer):
        c1 = c
        # The original mixed `x, y` with `data_x, data_y`; unified on the
        # training data passed to this function.
        update_theta0 = aph * part_theta0(theta0, theta1, theta2, data_x, data_y)
        update_theta1 = aph * part_theta1(theta0, theta1, theta2, data_x, data_y)
        update_theta2 = aph * part_theta2(theta0, theta1, theta2, data_x, data_y)
        theta0 -= update_theta0
        theta1 -= update_theta1
        theta2 -= update_theta2
        # Trace of the optimization path (kept from the original; useful
        # for plotting convergence).
        theta0s.append(theta0)
        theta1s.append(theta1)
        theta2s.append(theta2)
        c = cost(theta0, theta1, theta2, data_x, data_y)
        costs.append(c)
        counter += 1
    return theta0, theta1, theta2, counter

def predict(X, t0=None, t1=None, t2=None):
    """Predict targets for samples X with the fitted linear model.

    Args:
        X: numpy array of shape (m, 2) — two feature columns.
        t0, t1, t2: optional parameter overrides; when None, the
            module-level globals theta0/theta1/theta2 produced by the
            gradient-descent script above are used (backward compatible
            with the original zero-argument-theta call `predict(X)`).

    Returns:
        numpy array of shape (m,) with theta0 + theta1*X[:,0] + theta2*X[:,1].
    """
    # Restored the missing column index ("X[:,]" -> X[:, 0]) and dropped
    # the stray debug `print(counter)` from the original.
    if t0 is None:
        t0 = theta0
    if t1 is None:
        t1 = theta1
    if t2 is None:
        t2 = theta2
    y_pred = t0 + t1 * X[:, 0] + t2 * X[:, 1]
    return y_pred


if __name__ == "__main__":
    # Demo from the original article: predict for a single sample (20, 5).
    print(predict(np.array([[20, 5]])))

Excel 相关ＱＱ群：

Python QQ群：

• 发表于:
• 原文链接http://kuaibao.qq.com/s/20180113G017AV00?refer=cp_1026
• 腾讯「云+社区」是腾讯内容开放平台帐号（企鹅号）传播渠道之一，根据《腾讯内容开放平台服务协议》转载发布内容。

2018-06-19

2018-04-24

2020-02-20

2020-02-20

2020-02-20

2020-02-20