
Python Machine Learning: Building a Decision Tree with sklearn (Part 2)


Decision Trees

import numpy as np
import os

%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt
plt.rcParams['axes.labelsize'] = 14
plt.rcParams['xtick.labelsize'] = 12
plt.rcParams['ytick.labelsize'] = 12

import warnings
warnings.filterwarnings('ignore')   # suppress warning output

Visualizing the Tree Model

Download the installer from https://graphviz.gitlab.io/_pages/Download/Download_windows.html, then add the Graphviz bin directory to your PATH environment variable so that the dot command is available.

from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier

iris = load_iris()
X = iris.data[:, 2:]   # petal length and petal width
y = iris.target

tree_clf = DecisionTreeClassifier(max_depth=2)
tree_clf.fit(X, y)

from sklearn.tree import export_graphviz

export_graphviz(
    tree_clf,
    out_file='iris_tree.dot',
    feature_names=iris.feature_names[2:],
    class_names=iris.target_names,
    rounded=True,
    filled=True
)

You can then use the dot command-line tool from the graphviz package to convert this .dot file to a variety of formats, such as PDF or PNG. The following command converts the .dot file into a .png image:

$ dot -Tpng iris_tree.dot -o iris_tree.png

from IPython.display import Image
Image(filename='iris_tree.png', width=400, height=400)
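If you would rather skip the Graphviz installation, scikit-learn also provides a pure-matplotlib renderer, sklearn.tree.plot_tree. This alternative is not part of the original walkthrough; a minimal sketch, reusing the tree_clf fitted above:

from sklearn.tree import plot_tree

# Render the fitted tree directly with matplotlib, no .dot file needed.
plt.figure(figsize=(8, 6))
plot_tree(tree_clf,
          feature_names=iris.feature_names[2:],
          class_names=iris.target_names,
          rounded=True,
          filled=True)
plt.show()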

Plotting the Decision Boundary

from matplotlib.colors import ListedColormap

def plot_decision_boundary(clf, X, y, axes=[0, 7.5, 0, 3], iris=True, legend=False, plot_training=True):
    x1s = np.linspace(axes[0], axes[1], 100)
    x2s = np.linspace(axes[2], axes[3], 100)
    x1, x2 = np.meshgrid(x1s, x2s)
    X_new = np.c_[x1.ravel(), x2.ravel()]
    y_pred = clf.predict(X_new).reshape(x1.shape)
    custom_cmap = ListedColormap(['#fafab0', '#9898ff', '#a0faa0'])
    plt.contourf(x1, x2, y_pred, alpha=0.3, cmap=custom_cmap)
    if not iris:
        custom_cmap2 = ListedColormap(['#7d7d58', '#4c4c7f', '#507d50'])
        plt.contour(x1, x2, y_pred, cmap=custom_cmap2, alpha=0.8)
    if plot_training:
        plt.plot(X[:, 0][y==0], X[:, 1][y==0], "yo", label="Iris-Setosa")
        plt.plot(X[:, 0][y==1], X[:, 1][y==1], "bs", label="Iris-Versicolor")
        plt.plot(X[:, 0][y==2], X[:, 1][y==2], "g^", label="Iris-Virginica")
        plt.axis(axes)
    if iris:
        plt.xlabel("Petal length", fontsize=14)
        plt.ylabel("Petal width", fontsize=14)
    else:
        plt.xlabel(r"$x_1$", fontsize=18)
        plt.ylabel(r"$x_2$", fontsize=18, rotation=0)
    if legend:
        plt.legend(loc="lower right", fontsize=14)

plt.figure(figsize=(8, 4))
plot_decision_boundary(tree_clf, X, y)
plt.plot([2.45, 2.45], [0, 3], "k-", linewidth=2)
plt.plot([2.45, 7.5], [1.75, 1.75], "k--", linewidth=2)
plt.plot([4.95, 4.95], [0, 1.75], "k:", linewidth=2)
plt.plot([4.85, 4.85], [1.75, 3], "k:", linewidth=2)
plt.text(1.40, 1.0, "Depth=0", fontsize=15)
plt.text(3.2, 1.80, "Depth=1", fontsize=13)
plt.text(4.05, 0.5, "(Depth=2)", fontsize=11)
plt.title('Decision Tree decision boundaries')
plt.show()

Probability Estimates

Estimating class probabilities: take as input a flower whose petals are 5 cm long and 1.5 cm wide. The corresponding leaf node is the left node at depth 2, so the decision tree should output the following probabilities:

0% for Iris-Setosa (0/54), 90.7% for Iris-Versicolor (49/54), and 9.3% for Iris-Virginica (5/54).
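You can verify these numbers directly with the tree_clf fitted above (the inline values are approximate):

tree_clf.predict_proba([[5, 1.5]])   # e.g. array([[0.   , 0.907, 0.093]])
tree_clf.predict([[5, 1.5]])         # array([1]) -> Iris-Versicolor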

Regularization in Decision Trees

The DecisionTreeClassifier class has several other parameters that similarly restrict the shape of the tree (a combined example follows this list):

min_samples_split (the minimum number of samples a node must have before it can be split),

min_samples_leaf (the minimum number of samples a leaf node must have),

max_leaf_nodes (the maximum number of leaf nodes),

max_features (the maximum number of features evaluated for splitting at each node),

max_depth (the maximum depth of the tree).
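For illustration, several of these constraints can be combined on a single estimator. The hyperparameter values below are arbitrary placeholders to show the API, not tuned recommendations:

# Hypothetical values, chosen only for illustration.
regularized_clf = DecisionTreeClassifier(
    max_depth=4,
    min_samples_split=10,
    min_samples_leaf=5,
    max_leaf_nodes=16,
    max_features=2,
    random_state=42,
)
regularized_clf.fit(X, y)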

from sklearn.datasets import make_moons

X, y = make_moons(n_samples=100, noise=0.25, random_state=53)

tree_clf1 = DecisionTreeClassifier(random_state=42)
tree_clf2 = DecisionTreeClassifier(min_samples_leaf=4, random_state=42)
tree_clf1.fit(X, y)
tree_clf2.fit(X, y)

plt.figure(figsize=(12, 4))
plt.subplot(121)
plot_decision_boundary(tree_clf1, X, y, axes=[-1.5, 2.5, -1, 1.5], iris=False)
plt.title('No restrictions')
plt.subplot(122)
plot_decision_boundary(tree_clf2, X, y, axes=[-1.5, 2.5, -1, 1.5], iris=False)
plt.title('min_samples_leaf=4')

Sensitivity to the Training Data

np.random.seed(6)
Xs = np.random.rand(100, 2) - 0.5
ys = (Xs[:, 0] > 0).astype(np.float32) * 2

# Rotate the training set by 45 degrees.
angle = np.pi / 4
rotation_matrix = np.array([[np.cos(angle), -np.sin(angle)],
                            [np.sin(angle),  np.cos(angle)]])
Xsr = Xs.dot(rotation_matrix)

tree_clf_s = DecisionTreeClassifier(random_state=42)
tree_clf_s.fit(Xs, ys)
tree_clf_sr = DecisionTreeClassifier(random_state=42)
tree_clf_sr.fit(Xsr, ys)

plt.figure(figsize=(11, 4))
plt.subplot(121)
plot_decision_boundary(tree_clf_s, Xs, ys, axes=[-0.7, 0.7, -0.7, 0.7], iris=False)
plt.title('Sensitivity to training set rotation')
plt.subplot(122)
plot_decision_boundary(tree_clf_sr, Xsr, ys, axes=[-0.7, 0.7, -0.7, 0.7], iris=False)
plt.title('Sensitivity to training set rotation')
plt.show()

Regression Trees

np.random.seed(42)
m = 200
X = np.random.rand(m, 1)
y = 4 * (X - 0.5) ** 2
y = y + np.random.randn(m, 1) / 10   # add noise

from sklearn.tree import DecisionTreeRegressor
tree_reg = DecisionTreeRegressor(max_depth=2)
tree_reg.fit(X, y)

export_graphviz(
    tree_reg,
    out_file="regression_tree.dot",
    feature_names=["x1"],
    rounded=True,
    filled=True
)

# Convert as before: dot -Tpng regression_tree.dot -o regression_tree.png
from IPython.display import Image
Image(filename="regression_tree.png", width=400, height=400)

tree_reg1 = DecisionTreeRegressor(random_state=42, max_depth=2)
tree_reg2 = DecisionTreeRegressor(random_state=42, max_depth=3)
tree_reg1.fit(X, y)
tree_reg2.fit(X, y)

def plot_regression_predictions(tree_reg, X, y, axes=[0, 1, -0.2, 1], ylabel="$y$"):
    x1 = np.linspace(axes[0], axes[1], 500).reshape(-1, 1)
    y_pred = tree_reg.predict(x1)
    plt.axis(axes)
    plt.xlabel("$x_1$", fontsize=18)
    if ylabel:
        plt.ylabel(ylabel, fontsize=18, rotation=0)
    plt.plot(X, y, "b.")
    plt.plot(x1, y_pred, "r.-", linewidth=2, label=r"$\hat{y}$")

plt.figure(figsize=(11, 4))
plt.subplot(121)
plot_regression_predictions(tree_reg1, X, y)
for split, style in ((0.1973, "k-"), (0.0917, "k--"), (0.7718, "k--")):
    plt.plot([split, split], [-0.2, 1], style, linewidth=2)
plt.text(0.21, 0.65, "Depth=0", fontsize=15)
plt.text(0.01, 0.2, "Depth=1", fontsize=13)
plt.text(0.65, 0.8, "Depth=1", fontsize=13)
plt.legend(loc="upper center", fontsize=18)
plt.title("max_depth=2", fontsize=14)

plt.subplot(122)
plot_regression_predictions(tree_reg2, X, y, ylabel=None)
for split, style in ((0.1973, "k-"), (0.0917, "k--"), (0.7718, "k--")):
    plt.plot([split, split], [-0.2, 1], style, linewidth=2)
for split in (0.0458, 0.1298, 0.2873, 0.9040):
    plt.plot([split, split], [-0.2, 1], "k:", linewidth=1)
plt.text(0.3, 0.5, "Depth=2", fontsize=13)
plt.title("max_depth=3", fontsize=14)
plt.show()

tree_reg1 = DecisionTreeRegressor(random_state=42)
tree_reg2 = DecisionTreeRegressor(random_state=42, min_samples_leaf=10)
tree_reg1.fit(X, y)
tree_reg2.fit(X, y)

x1 = np.linspace(0, 1, 500).reshape(-1, 1)
y_pred1 = tree_reg1.predict(x1)
y_pred2 = tree_reg2.predict(x1)

plt.figure(figsize=(11, 4))
plt.subplot(121)
plt.plot(X, y, "b.")
plt.plot(x1, y_pred1, "r.-", linewidth=2, label=r"$\hat{y}$")
plt.axis([0, 1, -0.2, 1.1])
plt.xlabel("$x_1$", fontsize=18)
plt.ylabel("$y$", fontsize=18, rotation=0)
plt.legend(loc="upper center", fontsize=18)
plt.title("No restrictions", fontsize=14)

plt.subplot(122)
plt.plot(X, y, "b.")
plt.plot(x1, y_pred2, "r.-", linewidth=2, label=r"$\hat{y}$")
plt.axis([0, 1, -0.2, 1.1])
plt.xlabel("$x_1$", fontsize=18)
plt.title("min_samples_leaf={}".format(tree_reg2.min_samples_leaf), fontsize=14)
plt.show()
