# Python机器学习（八）-一招见血

#1. 导入各种算法

import pandas as pd

from sklearn.linear_model import LinearRegression

from sklearn.ensemble import BaggingRegressor

from sklearn.tree import DecisionTreeRegressor

from sklearn.ensemble import RandomForestRegressor

from sklearn.ensemble import ExtraTreesRegressor

from sklearn.neighbors import KNeighborsRegressor

import xgboost as xgb

from sklearn.svm import SVR

from sklearn.neural_network import MLPRegressor

#2. 将各算法存放在models字典中

# 2. Register every candidate algorithm in the `models` dict, keyed by a
# human-readable name used later in the report lines.
models = {
    'LinearRegression': LinearRegression(),
    'BaggingRegressor': BaggingRegressor(),
    'KNeighborsRegressor': KNeighborsRegressor(),
    'DecisionTreeRegressor': DecisionTreeRegressor(),
    'RandomForestRegressor': RandomForestRegressor(),
    'ExtraTreesRegressor': ExtraTreesRegressor(),
    'xgboost': xgb.XGBRegressor(),
    'SVM': SVR(),
    'MLPRegressor': MLPRegressor(),
}

# Printing the dict shows each estimator's repr, i.e. its default
# hyper-parameters.
print(models)

#3. 各算法评估

# 3. Evaluate every model with 5-fold cross-validation.
# FIX: `sklearn.cross_validation` was deprecated in scikit-learn 0.18 and
# removed in 0.20; `cross_val_score` now lives in `sklearn.model_selection`.
from sklearn.model_selection import cross_val_score

results = []

# NOTE(review): x_train / y_train are not defined anywhere in this snippet —
# they must be prepared earlier (train/test split of the dataset); confirm
# before running.
for key in models:
    # One score per fold; for regressors the default scorer is the
    # estimator's own .score(), i.e. R^2.
    cv_results = cross_val_score(models[key], x_train, y_train, cv=5)
    # FIX: `results` was declared but never populated in the original.
    results.append(cv_results)
    print('%s: %.4f(%.4f)' % (key, cv_results.mean(), cv_results.std()))

# 4. Sample output (mean(std) of the cross-validation scores per model):
#
# LinearRegression: 0.75(0.04)
# BaggingRegressor: 0.81(0.04)
# KNeighborsRegressor: 0.30(0.10)
# DecisionTreeRegressor: 0.64(0.20)
# RandomForestRegressor: 0.80(0.06)
# ExtraTreesRegressor: 0.89(0.04)
# xgboost: 0.83(0.05)
# SVM: -0.02(0.04)
# MLPRegressor: -353778.72(370539.08)

• 发表于:
• 原文链接https://kuaibao.qq.com/s/20180701G06CHH00?refer=cp_1026
• 腾讯「云+社区」是腾讯内容开放平台帐号（企鹅号）传播渠道之一，根据《腾讯内容开放平台服务协议》转载发布内容。
• 如有侵权，请联系 yunjia_community@tencent.com 删除。

2022-01-25

2022-01-25

2022-01-25

2022-01-25

2022-01-25

2022-01-25

2018-06-11

2018-05-18

2018-04-25

2022-01-25

2022-01-25

2022-01-25