Experiment 2.3

In [21]:
from sklearn.datasets import load_iris
from sklearn import tree
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeRegressor

In [22]:
iris = datasets.load_iris()
X = iris.data[:, 0:2]  # we only take the first two features for visualization
y = iris.target

In [23]:
y

Out[23]:
array([0, 0, 0, ..., 0, 1, 1, 1, ..., 1, 2, 2, 2, ..., 2])  # 150 labels, 50 per class

In [26]:
n_features = X.shape[1]

C = 10
kernel = 1.0 * RBF([1.0, 1.0])  # for GPC

# Create different classifiers.
classifiers = {
    "L1 logistic": LogisticRegression(
        C=C, penalty="l1", solver="saga", multi_class="multinomial", max_iter=10000
    ),
    "L2 logistic (Multinomial)": LogisticRegression(
        C=C, penalty="l2", solver="saga", multi_class="multinomial", max_iter=10000
    ),
    "L2 logistic (OvR)": LogisticRegression(
        C=C, penalty="l2", solver="saga", multi_class="ovr", max_iter=10000
    ),
    "Linear SVC": SVC(kernel="linear", C=C, probability=True, random_state=0),
    "GPC": GaussianProcessClassifier(kernel),
}

n_classifiers = len(classifiers)

In [27]:
plt.figure(figsize=(3 * 2, n_classifiers * 2))
plt.subplots_adjust(bottom=0.2, top=0.95)

# Grid over the (sepal length, sepal width) plane used to draw the probability maps.
xx = np.linspace(3, 9, 100)
yy = np.linspace(1, 5, 100).T
xx, yy = np.meshgrid(xx, yy)
Xfull = np.c_[xx.ravel(), yy.ravel()]

for index, (name, classifier) in enumerate(classifiers.items()):
    classifier.fit(X, y)

    y_pred = classifier.predict(X)
    accuracy = accuracy_score(y, y_pred)
    print("Accuracy (train) for %s: %0.1f%% " % (name, accuracy * 100))

    # View probabilities:
    probas = classifier.predict_proba(Xfull)
    n_classes = np.unique(y_pred).size
    for k in range(n_classes):
        plt.subplot(n_classifiers, n_classes, index * n_classes + k + 1)
        plt.title("Class %d" % k)
        if k == 0:
            plt.ylabel(name)
        imshow_handle = plt.imshow(
            probas[:, k].reshape((100, 100)), extent=(3, 9, 1, 5), origin="lower"
        )
        plt.xticks(())
        plt.yticks(())
        # Overlay the training points predicted as class k.
        idx = y_pred == k
        if idx.any():
            plt.scatter(X[idx, 0], X[idx, 1], marker="o", c="w", edgecolor="k")

ax = plt.axes([0.15, 0.04, 0.7, 0.05])
plt.title("Probability")
plt.colorbar(imshow_handle, cax=ax, orientation="horizontal")

plt.show()

Accuracy (train) for L1 logistic: 83.3%
Accuracy (train) for L2 logistic (Multinomial): 82.7%
Accuracy (train) for L2 logistic (OvR): 79.3%
Accuracy (train) for Linear SVC: 82.0%
Accuracy (train) for GPC: 82.7%

[Figure: 5 x 3 grid of predicted-probability maps, one row per classifier (L1 logistic, L2 logistic (Multinomial), L2 logistic (OvR), Linear SVC, GPC) and one column per class (Class 0, Class 1, Class 2), with a shared horizontal "Probability" colorbar running from roughly 0.1 to 0.7.]
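One way to read these probability maps numerically is to query the fitted models directly. The cell below is a minimal sketch, not part of the original experiment: it assumes the classifiers dictionary has already been fitted by the loop in In [27], and the sample [6.0, 3.0] (sepal length and sepal width in cm) is a hypothetical point chosen purely for illustration.

In [ ]:
# Illustrative extension (not in the original notebook): class probabilities
# that each fitted classifier assigns to one hypothetical sample.
# [6.0, 3.0] = (sepal length, sepal width) in cm, chosen only for illustration.
sample = np.array([[6.0, 3.0]])

for name, classifier in classifiers.items():
    # every classifier was fitted on (X, y) in the loop of In [27]
    proba = classifier.predict_proba(sample)[0]
    print("%-25s -> %s" % (name, np.round(proba, 3)))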
