TVP

# 简单教程：用Python解决简单的水果分类问题

AiTechYun

# Print (rows, columns) of the dataset; `fruits` is a pandas DataFrame
# loaded earlier in the article (the loading code is not in this excerpt).
print(fruits.shape)

(59, 7)

# List the distinct fruit class labels present in the data.
print(fruits['fruit_name'].unique())

['苹果' '柑橘' '橙子' '柠檬']

# Sample count per fruit class — a quick class-balance check.
print(fruits.groupby('fruit_name').size())

# Bar chart of the number of samples per fruit class.
# NOTE(review): the scraped source fused these statements onto one line;
# restored as separate statements. `plt` is imported explicitly since no
# matplotlib import is visible in this excerpt.
import seaborn as sns
import matplotlib.pyplot as plt

sns.countplot(fruits['fruit_name'], label="Count")
plt.show()

import matplotlib.pyplot as plt

# One box plot per numeric input variable; `fruit_label` is the target
# column, so it is dropped before plotting. Saved to 'fruits_box.png'.
fruits.drop('fruit_label', axis=1).plot(
    kind='box', subplots=True, layout=(2, 2),
    sharex=False, sharey=False, figsize=(9, 9),
    title='Box Plot for each input variable')
plt.savefig('fruits_box')
plt.show()

import pylab as pl
import matplotlib.pyplot as plt

# Histogram (30 bins) for each numeric input variable, with a shared
# figure title. Saved to 'fruits_hist.png'.
fruits.drop('fruit_label', axis=1).hist(bins=30, figsize=(9, 9))
pl.suptitle("Histogram for each numeric input variable")
plt.savefig('fruits_hist')
plt.show()

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler

# Hold out 25% (train_test_split default) of the data for testing;
# random_state fixed for reproducibility. X (feature matrix) and
# y (label vector) are built from `fruits` elsewhere in the article.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Scale every feature to [0, 1]. The scaler is fit on the training set
# only, then applied to the test set, to avoid leaking test statistics.
scaler = MinMaxScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

from sklearn.linear_model import LogisticRegression

# Baseline classifier: logistic regression with default settings.
logreg = LogisticRegression()
logreg.fit(X_train, y_train)
print('Accuracy of Logistic regression classifier on training set: {:.2f}'
      .format(logreg.score(X_train, y_train)))
print('Accuracy of Logistic regression classifier on test set: {:.2f}'
      .format(logreg.score(X_test, y_test)))

from sklearn.tree import DecisionTreeClassifier

# Decision tree with default settings; unconstrained depth typically
# overfits small datasets (train accuracy >> test accuracy).
clf = DecisionTreeClassifier().fit(X_train, y_train)
print('Accuracy of Decision Tree classifier on training set: {:.2f}'
      .format(clf.score(X_train, y_train)))
print('Accuracy of Decision Tree classifier on test set: {:.2f}'
      .format(clf.score(X_test, y_test)))

K-Nearest Neighbors (K-NN)

from sklearn.neighbors import KNeighborsClassifier

# k-nearest neighbors with the default k (n_neighbors=5).
knn = KNeighborsClassifier()
knn.fit(X_train, y_train)
print('Accuracy of K-NN classifier on training set: {:.2f}'
      .format(knn.score(X_train, y_train)))
print('Accuracy of K-NN classifier on test set: {:.2f}'
      .format(knn.score(X_test, y_test)))

from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

# Linear discriminant analysis with default settings.
lda = LinearDiscriminantAnalysis()
lda.fit(X_train, y_train)
print('Accuracy of LDA classifier on training set: {:.2f}'
      .format(lda.score(X_train, y_train)))
print('Accuracy of LDA classifier on test set: {:.2f}'
      .format(lda.score(X_test, y_test)))

from sklearn.naive_bayes import GaussianNB

# Gaussian naive Bayes with default settings.
gnb = GaussianNB()
gnb.fit(X_train, y_train)
print('Accuracy of GNB classifier on training set: {:.2f}'
      .format(gnb.score(X_train, y_train)))
print('Accuracy of GNB classifier on test set: {:.2f}'
      .format(gnb.score(X_test, y_test)))

from sklearn.svm import SVC

# Support vector classifier with default settings (RBF kernel).
svm = SVC()
svm.fit(X_train, y_train)
print('Accuracy of SVM classifier on training set: {:.2f}'
      .format(svm.score(X_train, y_train)))
print('Accuracy of SVM classifier on test set: {:.2f}'
      .format(svm.score(X_test, y_test)))

KNN算法是我们尝试过的最精确的模型。混淆矩阵提供了在测试集上没有错误的指示。但是，测试集非常小。

from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix

# Detailed test-set evaluation of the k-NN model: confusion matrix plus
# per-class precision/recall/F1. `knn` must already be fitted.
pred = knn.predict(X_test)
print(confusion_matrix(y_test, pred))
print(classification_report(y_test, pred))

import matplotlib.cm as cm
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import numpy as np
from matplotlib.colors import ListedColormap, BoundaryNorm
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

# Rebuild the (unscaled) feature matrix / label vector and re-split,
# since the decision-boundary plot uses raw heights and widths.
X = fruits[['mass', 'width', 'height', 'color_score']]
y = fruits['fruit_label']
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)


def plot_fruit_knn(X, y, n_neighbors, weights):
    """Fit a k-NN classifier on (height, width) only and plot its
    decision regions together with the training points.

    X: DataFrame containing at least 'height' and 'width' columns.
    y: Series of integer fruit labels (1=apple .. 4=lemon).
    n_neighbors, weights: forwarded to KNeighborsClassifier.
    """
    # FIX: DataFrame.as_matrix() was removed in pandas 1.0 — use to_numpy().
    X_mat = X[['height', 'width']].to_numpy()
    y_mat = y.to_numpy()

    # Color maps: light shades for decision regions, bold for points.
    cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF', '#AFAFAF'])
    cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF', '#AFAFAF'])

    # FIX: the original referenced `neighbors.KNeighborsClassifier`
    # without importing the `neighbors` module; use the imported class.
    clf = KNeighborsClassifier(n_neighbors, weights=weights)
    clf.fit(X_mat, y_mat)

    # Predict on a dense mesh over the feature plane to color each
    # region by its predicted class.
    mesh_step_size = .01   # mesh resolution
    plot_symbol_size = 50  # marker size for training points
    x_min, x_max = X_mat[:, 0].min() - 1, X_mat[:, 0].max() + 1
    y_min, y_max = X_mat[:, 1].min() - 1, X_mat[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, mesh_step_size),
                         np.arange(y_min, y_max, mesh_step_size))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot.
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Overlay the training points.
    plt.scatter(X_mat[:, 0], X_mat[:, 1], s=plot_symbol_size, c=y,
                cmap=cmap_bold, edgecolor='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

    # Manual legend mapping each bold color to its fruit name.
    patch0 = mpatches.Patch(color='#FF0000', label='apple')
    patch1 = mpatches.Patch(color='#00FF00', label='mandarin')
    patch2 = mpatches.Patch(color='#0000FF', label='orange')
    patch3 = mpatches.Patch(color='#AFAFAF', label='lemon')
    plt.legend(handles=[patch0, patch1, patch2, patch3])

    plt.xlabel('height (cm)')
    plt.ylabel('width (cm)')
    plt.title("4-Class classification (k = %i, weights = '%s')"
              % (n_neighbors, weights))
    plt.show()


plot_fruit_knn(X_train, y_train, 5, 'uniform')

import matplotlib.pyplot as plt
from sklearn.neighbors import KNeighborsClassifier

# Sweep k from 1 to 19 and record test-set accuracy for each setting,
# then plot accuracy vs. k to pick the best neighborhood size.
k_range = range(1, 20)
scores = []
for k in k_range:
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_train, y_train)
    scores.append(knn.score(X_test, y_test))

plt.figure()
plt.xlabel('k')
plt.ylabel('accuracy')
plt.scatter(k_range, scores)
plt.xticks([0, 5, 10, 15, 20])

• 发表于:
• 原文链接http://kuaibao.qq.com/s/20180104B0HGI900?refer=cp_1026
• 腾讯「腾讯云开发者社区」是腾讯内容开放平台帐号（企鹅号）传播渠道之一，根据《腾讯内容开放平台服务协议》转载发布内容。
• 如有侵权，请联系 cloudcommunity@tencent.com 删除。

2023-07-16

2020-01-14

2018-01-26

2018-06-25

2023-09-16

2018-07-23

2018-05-18

2018-05-06

2023-10-05

2018-05-06

2018-08-09

2023-12-20

2023-01-03

2018-06-14

2018-10-24

2018-06-13

2018-10-23

2018-06-14

2018-10-20

2019-07-26