Problem
Using Python, generate the two-moons synthetic dataset, with 70% of the samples used for training and 30% for testing. Combining the supervised learning algorithms already covered (Bayes, neural networks, support vector machines, etc.), implement AdaBoost as well as averaging- and voting-based ensemble learning, and compare the algorithms by test accuracy. Specific requirements:
(1) AdaBoost: the base classifier should be a support vector machine under different parameter settings (e.g. C, kernel), or a Bayes, neural-network, or other classifier.
(2) Averaging- and voting-based ensembles: the base classifiers must include different Bayes and support vector machine models.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.svm import SVC
from sklearn import metrics
from sklearn.naive_bayes import GaussianNB
from sklearn.neural_network import MLPClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
# sklearn emits FutureWarnings during fitting; silence them to keep the output readable
import warnings
warnings.filterwarnings("ignore", category=FutureWarning, module="sklearn")
# Generate the two-moons dataset and split it 70% train / 30% test
x, y = make_moons(n_samples=10000, noise=0.4)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3)
print('train/test shapes:', x_train.shape, x_test.shape)
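# Optional sanity-check sketch: scatter the two classes to confirm the interleaving
# half-moons look as expected (plt is already imported above; the figure is not
# required by the assignment itself).
plt.scatter(x[y == 0, 0], x[y == 0, 1], s=5, label='class 0')
plt.scatter(x[y == 1, 0], x[y == 1, 1], s=5, label='class 1')
plt.legend()
plt.title('two-moons, n_samples=10000, noise=0.4')
plt.show()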
# AdaBoost with SVM base classifiers under different parameter settings (C, kernel, n_estimators)
sacc = []
def ab(c, kernel, n_estimators):
    abc = AdaBoostClassifier(SVC(C=c, kernel=kernel),
                             algorithm="SAMME",
                             n_estimators=n_estimators)
    abc.fit(x_train, y_train)
    y_pred = abc.predict(x_test)
    acc = metrics.accuracy_score(y_test, y_pred)
    sacc.append([c, kernel, n_estimators, acc])
    print('C=%d, kernel=%s, n_estimators=%d, acc=%.4f' % (c, kernel, n_estimators, acc))
# Grid of SVM hyper-parameters to try as AdaBoost base classifiers
kernels = ['linear', 'poly', 'rbf']
c_values = list(range(5, 20, 5))           # C = 5, 10, 15
estimator_counts = list(range(20, 50, 5))  # n_estimators = 20, 25, ..., 45
for c in c_values:
    for n_estimators in estimator_counts:
        for kernel in kernels:
            ab(c, kernel, n_estimators)
print(sacc)
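# One way to summarize the grid above: report the (C, kernel, n_estimators) combination
# with the highest test accuracy (each entry of sacc is [c, kernel, n_estimators, acc]).
best = max(sacc, key=lambda r: r[3])
print('best SVM-AdaBoost setting: C=%s, kernel=%s, n_estimators=%s, acc=%.4f' % tuple(best))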
# AdaBoost with a Gaussian naive Bayes base classifier
abc = AdaBoostClassifier(GaussianNB(),
                         algorithm="SAMME",
                         n_estimators=70)
abc.fit(x_train, y_train)
y_pred = abc.predict(x_test)
acc = metrics.accuracy_score(y_test, y_pred)
print('AdaBoost (GaussianNB) accuracy: ', acc)
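# For comparison, a plain (un-boosted) Gaussian naive Bayes baseline, so the effect
# of boosting can be read off directly from the two accuracies.
nb = GaussianNB()
nb.fit(x_train, y_train)
print('plain GaussianNB accuracy: ', metrics.accuracy_score(y_test, nb.predict(x_test)))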
# Voting ensemble of SVMs with different kernels and C values
# (soft voting averages the predicted class probabilities, here with weights 2:1:2)
cf1 = SVC(C=10, probability=True)                    # RBF kernel (default)
cf2 = SVC(C=20, kernel='linear', probability=True)
cf3 = SVC(C=30, kernel='poly', probability=True)
vc = VotingClassifier(estimators=[('svc_rbf', cf1),
                                  ('svc_linear', cf2),
                                  ('svc_poly', cf3)],
                      voting='soft', weights=[2, 1, 2])
vc.fit(x_train, y_train)
y_pred = vc.predict(x_test)
acc = metrics.accuracy_score(y_test, y_pred)
print('voting (SVMs) accuracy: ', acc)
# Soft voting over heterogeneous base classifiers (logistic regression, SVM, MLP, naive Bayes)
voting_clf = VotingClassifier(estimators=[
        ('log_clf', LogisticRegression()),
        ('svm_clf', SVC(probability=True)),
        ('mlp_clf', MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(5, 2), random_state=1)),
        ('nb_clf', GaussianNB())],
    voting='soft')
voting_clf.fit(x_train, y_train)
print('soft voting accuracy: ', voting_clf.score(x_test, y_test))
# Hard voting (majority vote) over the same base classifiers
voting_clf = VotingClassifier(estimators=[
        ('log_clf', LogisticRegression()),
        ('svm_clf', SVC(probability=True)),
        ('mlp_clf', MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(5, 2), random_state=1)),
        ('nb_clf', GaussianNB())],
    voting='hard')
voting_clf.fit(x_train, y_train)
print('hard voting accuracy: ', voting_clf.score(x_test, y_test))
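# Averaging-based ensemble, one straightforward reading of requirement (2):
# fit different Bayes and SVM base classifiers, average their predicted class
# probabilities by hand, and take the argmax. Soft voting above does the same
# thing internally, just with per-classifier weights.
base_clfs = [GaussianNB(),
             SVC(C=10, probability=True),
             SVC(C=20, kernel='linear', probability=True)]
probas = []
for clf in base_clfs:
    clf.fit(x_train, y_train)
    probas.append(clf.predict_proba(x_test))
avg_proba = np.mean(probas, axis=0)
y_pred = np.argmax(avg_proba, axis=1)
print('averaging ensemble accuracy: ', metrics.accuracy_score(y_test, y_pred))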