Machine Learning in Python 10_08: Ensemble Learning, Bagging and RandomForest Implementation
2021/5/6 22:25:41
1. Introduction
To make the base learners even more diverse, the idea behind randomforest is to add a round of random feature subsampling on top of bagging: for each base learner, bootstrap-sample the training rows, randomly select a subset of the feature columns, and fit the learner on that sub-sample; the learners' predictions are then averaged. A minimal sketch of this sampling step follows.
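Before the full class in the next section, here is a small sketch (an addition for illustration, with placeholder names x, y, and feature_sample) of the per-learner sampling step that the fit methods below perform:

import numpy as np

def draw_subsample(x, y, feature_sample=0.66):
    """Bootstrap the rows and randomly pick a fraction of the columns."""
    n_sample, n_feature = x.shape
    # bootstrap: sample rows with replacement, keeping the dataset size
    row_idx = np.random.choice(n_sample, n_sample, replace=True)
    # feature subsampling: pick a fraction of the columns without replacement
    col_idx = np.random.choice(n_feature, int(n_feature * feature_sample), replace=False)
    return x[row_idx][:, col_idx], y[row_idx], col_idx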
2. RandomForest: Classification Implementation
import os
os.chdir('../')
from ml_models import utils
from ml_models.tree import CARTClassifier
import copy
import numpy as np

"""
RandomForest classification implementation, packaged into ml_models.ensemble
"""

class RandomForestClassifier(object):
    def __init__(self, base_estimator=None, n_estimators=10, feature_sample=0.66):
        """
        :param base_estimator: base learner; heterogeneous learners are allowed by passing a list,
                               e.g. [estimator1, estimator2, ..., estimator10], in which case n_estimators is ignored;
                               for a homogeneous ensemble, the single estimator is deep-copied n_estimators times
        :param n_estimators: number of base learners
        :param feature_sample: fraction of features sampled for each base learner
        """
        self.base_estimator = base_estimator
        self.n_estimators = n_estimators
        if self.base_estimator is None:
            # default to a decision tree
            self.base_estimator = CARTClassifier()
        # homogeneous classifiers
        if type(base_estimator) != list:
            estimator = self.base_estimator
            self.base_estimator = [copy.deepcopy(estimator) for _ in range(0, self.n_estimators)]
        # heterogeneous classifiers
        else:
            self.n_estimators = len(self.base_estimator)
        self.feature_sample = feature_sample
        # record the features selected by each base learner
        self.feature_indices = []

    def fit(self, x, y):
        # TODO: parallelize
        n_sample, n_feature = x.shape
        for estimator in self.base_estimator:
            # bootstrap-resample the training set
            indices = np.random.choice(n_sample, n_sample, replace=True)
            x_bootstrap = x[indices]
            y_bootstrap = y[indices]
            # subsample the features
            feature_indices = np.random.choice(n_feature, int(n_feature * self.feature_sample), replace=False)
            self.feature_indices.append(feature_indices)
            x_bootstrap = x_bootstrap[:, feature_indices]
            estimator.fit(x_bootstrap, y_bootstrap)

    def predict_proba(self, x):
        # TODO: parallelize
        probas = []
        for index, estimator in enumerate(self.base_estimator):
            probas.append(estimator.predict_proba(x[:, self.feature_indices[index]]))
        return np.mean(probas, axis=0)

    def predict(self, x):
        return np.argmax(self.predict_proba(x), axis=1)
# generate synthetic data
from sklearn.datasets import make_classification
data, target = make_classification(n_samples=100, n_features=2, n_classes=2, n_informative=1,
                                   n_redundant=0, n_repeated=0, n_clusters_per_class=1,
                                   class_sep=.5, random_state=21)
# homogeneous ensemble
classifier = RandomForestClassifier(feature_sample=0.6)
classifier.fit(data, target)
utils.plot_decision_function(data, target, classifier)
# heterogeneous ensemble
from ml_models.linear_model import LogisticRegression
from ml_models.svm import SVC
classifier = RandomForestClassifier(base_estimator=[LogisticRegression(), SVC(kernel='rbf', C=5.0), CARTClassifier(max_depth=2)],
                                    feature_sample=0.6)
classifier.fit(data, target)
utils.plot_decision_function(data, target, classifier)
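For reference (this is an addition, not part of the original post), scikit-learn's built-in estimator can be set up in a similar way; max_features here plays roughly the role of feature_sample, although scikit-learn resamples features at every tree split rather than once per tree, so the results will differ:

# rough scikit-learn counterpart of the homogeneous example above (illustrative only)
from sklearn.ensemble import RandomForestClassifier as SkRandomForestClassifier

sk_clf = SkRandomForestClassifier(n_estimators=10, max_features=0.6, bootstrap=True, random_state=21)
sk_clf.fit(data, target)
print(sk_clf.predict_proba(data[:5]))  # per-class probabilities for the first five samples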
3. Code Implementation: Regression
from ml_models.tree import CARTRegressor

"""
RandomForest regression implementation, packaged into ml_models.ensemble
"""

class RandomForestRegressor(object):
    def __init__(self, base_estimator=None, n_estimators=10, feature_sample=0.66):
        """
        :param base_estimator: base learner; heterogeneous learners are allowed by passing a list,
                               e.g. [estimator1, estimator2, ..., estimator10], in which case n_estimators is ignored;
                               for a homogeneous ensemble, the single estimator is deep-copied n_estimators times
        :param n_estimators: number of base learners
        :param feature_sample: fraction of features sampled for each base learner
        """
        self.base_estimator = base_estimator
        self.n_estimators = n_estimators
        if self.base_estimator is None:
            # default to a decision tree
            self.base_estimator = CARTRegressor()
        # homogeneous
        if type(base_estimator) != list:
            estimator = self.base_estimator
            self.base_estimator = [copy.deepcopy(estimator) for _ in range(0, self.n_estimators)]
        # heterogeneous
        else:
            self.n_estimators = len(self.base_estimator)
        self.feature_sample = feature_sample
        # record the features selected by each base learner
        self.feature_indices = []

    def fit(self, x, y):
        # TODO: parallelize
        n_sample, n_feature = x.shape
        for estimator in self.base_estimator:
            # bootstrap-resample the training set
            indices = np.random.choice(n_sample, n_sample, replace=True)
            x_bootstrap = x[indices]
            y_bootstrap = y[indices]
            # subsample the features
            feature_indices = np.random.choice(n_feature, int(n_feature * self.feature_sample), replace=False)
            self.feature_indices.append(feature_indices)
            x_bootstrap = x_bootstrap[:, feature_indices]
            estimator.fit(x_bootstrap, y_bootstrap)

    def predict(self, x):
        # TODO: parallelize
        preds = []
        for index, estimator in enumerate(self.base_estimator):
            preds.append(estimator.predict(x[:, self.feature_indices[index]]))
        return np.mean(preds, axis=0)
# construct data
data = np.linspace(1, 10, num=100)
target1 = 3 * data[:50] + np.random.random(size=50) * 3    # add noise
target2 = 3 * data[50:] + np.random.random(size=50) * 10   # add noise
target = np.concatenate([target1, target2])
data = data.reshape((-1, 1))
# homogeneous ensemble
import matplotlib.pyplot as plt
model = RandomForestRegressor(base_estimator=CARTRegressor(), n_estimators=2, feature_sample=1)  # only one feature, so no feature subsampling is possible
model.fit(data, target)
plt.scatter(data, target)
plt.plot(data, model.predict(data), color='r')
(Output: scatter plot of the noisy data with the ensemble's fitted curve in red)
# heterogeneous ensemble
from ml_models.linear_model import LinearRegression
model = RandomForestRegressor(base_estimator=[LinearRegression(), CARTRegressor()], feature_sample=1)
model.fit(data, target)
plt.scatter(data, target)
plt.plot(data, model.predict(data), color='r')
(Output: scatter plot of the noisy data with the heterogeneous ensemble's fitted curve in red)
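As a quick sanity check (not in the original post), the training mean squared error of the averaged predictions can be computed directly, reusing data, target, and model from the cells above:

# illustrative only: training MSE of the heterogeneous regression ensemble
mse = np.mean((model.predict(data) - target) ** 2)
print('training MSE:', mse)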