This example uses a Decision Tree to learn a set of if-then-else decision rules from the data in order to fit a sine curve with added noise; as a result, it learns local linear regressions that approximate the sine curve. The deeper the tree (controlled by the max_depth parameter), the more complex the decision rules and the more closely the model fits the data, but if the data contain noise, a tree that is too deep can overfit. This example fits trees of different depths to show what can happen when they are trained on noisy data.
import numpy as np
from sklearn.tree import DecisionTreeRegressor
import matplotlib.pyplot as plt
rng = np.random.RandomState(1)
X = np.sort(5 * rng.rand(80, 1), axis=0) # randomly generate 80 values between 0 and 5, sorted in ascending order
y = np.sin(X).ravel()
y[::5] += 3 * (0.5 - rng.rand(16)) # add noise to every 5th sample (16 of the 80 points)
regr_1 = DecisionTreeRegressor(max_depth=2) # decision tree with maximum depth 2
regr_2 = DecisionTreeRegressor(max_depth=5) # decision tree with maximum depth 5
regr_1.fit(X, y)
regr_2.fit(X, y)
X_test = np.arange(0.0, 5.0, 0.01)[:, np.newaxis] # test points from 0 to 5, reshaped into a single-feature column
y_1 = regr_1.predict(X_test)
y_2 = regr_2.predict(X_test)
plt.figure()
plt.scatter(X, y, c="darkorange", label="data")
plt.plot(X_test, y_1, color="cornflowerblue", label="max_depth=2", linewidth=2)
plt.plot(X_test, y_2, color="yellowgreen", label="max_depth=5", linewidth=2)
plt.xlabel("data") #x軸代表data數值
plt.ylabel("target") #y軸代表target數值
plt.title("Decision Tree Regression") #標示圖片的標題
plt.legend() #繪出圖例
plt.show()
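The if-then-else rules mentioned at the start can also be inspected directly. The following is a minimal sketch, not part of the original example: it assumes the regr_1 fitted above and uses sklearn.tree.export_text (available since scikit-learn 0.21); the feature name "x" is only an illustrative label.

from sklearn.tree import export_text
# Print the learned decision rules of the depth-2 tree fitted above;
# each split is shown as an if-then-else branch on the input value.
print(export_text(regr_1, feature_names=["x"]))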
The complete script, following the original scikit-learn example, is listed below.
print(__doc__)
# Import the necessary modules and libraries
import numpy as np
from sklearn.tree import DecisionTreeRegressor
import matplotlib.pyplot as plt
# Create a random dataset
rng = np.random.RandomState(1)
X = np.sort(5 * rng.rand(80, 1), axis=0)
y = np.sin(X).ravel()
y[::5] += 3 * (0.5 - rng.rand(16))
# Fit regression model
regr_1 = DecisionTreeRegressor(max_depth=2)
regr_2 = DecisionTreeRegressor(max_depth=5)
regr_1.fit(X, y)
regr_2.fit(X, y)
# Predict
X_test = np.arange(0.0, 5.0, 0.01)[:, np.newaxis]
y_1 = regr_1.predict(X_test)
y_2 = regr_2.predict(X_test)
# Plot the results
plt.figure()
plt.scatter(X, y, c="darkorange", label="data")
plt.plot(X_test, y_1, color="cornflowerblue", label="max_depth=2", linewidth=2)
plt.plot(X_test, y_2, color="yellowgreen", label="max_depth=5", linewidth=2)
plt.xlabel("data")
plt.ylabel("target")
plt.title("Decision Tree Regression")
plt.legend()
plt.show()
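To make the overfitting point more concrete, the fit at different depths can also be checked numerically rather than only visually. The sketch below is not part of the original example; it assumes a held-out split via sklearn.model_selection.train_test_split and compares the R^2 values returned by DecisionTreeRegressor.score on the training and held-out sets.

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor

# Recreate the noisy sine data used above
rng = np.random.RandomState(1)
X = np.sort(5 * rng.rand(80, 1), axis=0)
y = np.sin(X).ravel()
y[::5] += 3 * (0.5 - rng.rand(16))

# Hold out 30% of the samples to estimate generalization
X_train, X_hold, y_train, y_hold = train_test_split(X, y, test_size=0.3, random_state=0)

for depth in (2, 5, 10):
    tree = DecisionTreeRegressor(max_depth=depth).fit(X_train, y_train)
    # A growing gap between training and held-out R^2 as depth increases indicates overfitting
    print(depth, tree.score(X_train, y_train), tree.score(X_hold, y_hold))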