In [9]:

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="white", color_codes=True)
import warnings
warnings.filterwarnings("ignore")
import os
print(os.listdir())

['.conda', '.condarc', '.continuum', '.ipynb_checkpoints', '.ipython', '.jupyter', '.matplotlib', '.ms-ad', '3D Objects', 'AppData', 'Application Data', 'Contacts', 'Cookies', 'Desktop', 'Documents', 'Downloads', 'Exercise.class', 'Exercise.java', 'Favorites', 'IntelGraphicsProfiles', 'Links', 'Local Settings', 'mca2145045', 'Music', 'My Documents', 'NetHood', 'NTUSER.DAT', ..., 'ntuser.ini', 'OneDrive', 'Pictures', 'PrintHood', 'pythonaLab3.ipynb', 'Recent', 'Saved Games', 'Searches', 'SendTo', 'Start Menu', 'Templates', 'Untitled Folder', 'Untitled.ipynb', 'Untitled1.ipynb', ..., 'Untitled22.ipynb', 'Videos']

In [10]:

iris = pd.read_csv("Iris.csv")
iris.head()

Out[10]:

   sepal.length  sepal.width  petal.length  petal.width variety
0           5.1          3.5           1.4          0.2  Setosa
1           4.9          3.0           1.4          0.2  Setosa
2           4.7          3.2           1.3          0.2  Setosa
3           4.6          3.1           1.5          0.2  Setosa
4           5.0          3.6           1.4          0.2  Setosa

In [11]:

iris.describe()

Out[11]:

       sepal.length  sepal.width  petal.length  petal.width
count    150.000000   150.000000    150.000000   150.000000
mean       5.843333     3.057333      3.758000     1.199333
std        0.828066     0.435866      1.765298     0.762238
min        4.300000     2.000000      1.000000     0.100000
25%        5.100000     2.800000      1.600000     0.300000
50%        5.800000     3.000000      4.350000     1.300000
75%        6.400000     3.300000      5.100000     1.800000
max        7.900000     4.400000      6.900000     2.500000

In [13]:

iris.plot(kind="scatter", x="sepal.length", y="sepal.width")

*c* argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with *x* & *y*. Please use the *color* keyword-argument or provide a 2D array with a single row if you intend to specify the same RGB or RGBA value for all points.

Out[13]: <AxesSubplot:xlabel='sepal.length', ylabel='sepal.width'>

[figure: scatter plot of sepal.width against sepal.length]

In [20]:

sns.jointplot(x="sepal.length", y="sepal.width", size=5)

ValueError                                Traceback (most recent call last)
Input In [20], in <cell line: 1>()
----> 1 sns.jointplot(x = "sepal.length", y = "sepal.width", size = 5)
...
ValueError: Could not interpret value `sepal.length` for parameter `x`

[figure: empty JointGrid axes]
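The jointplot call in In [20] fails because only column-name strings were passed without the DataFrame they refer to, and the size= keyword was deprecated in favor of height= in later seaborn releases. A minimal corrected call might look like the sketch below (it assumes the iris DataFrame and column names loaded above):

# Sketch of a working jointplot call: pass the DataFrame via data= so seaborn
# can resolve the column names, and use height= instead of the deprecated size=.
sns.jointplot(data=iris, x="sepal.length", y="sepal.width", height=5)
plt.show()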
" 38 From version @.12, the only valid positional argument “ 43 FutureWarning a) 45 kwargs.update({k: arg for k, arg in zip(sig.parameters, args)}) --+> 46 return #(**kwargs) File C:\Programbata\anaconda3\1ib\site-packages\seaborn\axisgrid.py:2238, in jointplot(x, y, data, kind, color, height, ratio, space, dropna, xlim, ylim, marginal ticks, joint_kws, marginal_kws, hue, palette, hue_order, hue_norm, *kwargs) 2227, dropna = True 2229 # Initialize the JointGrid object -> 2230 grid = JointGrid( 2231 data=data, x-x, y-y, hue-hue, 2232 pallette=palette, hue_order-hue_order, hue_norm-hue_norm, 2233 dropna=dropna, height=height, ratio-ratio, space-space, 2234 xlim=xlim, ylim-ylim, marginal_ticks=marginal_ticks, 2235 ) 2237 if grid.hue is not None: 2238 © marginal_kws.setdefault("legend", False) File C:\ProgramData\Anaconda3\1ib\site-packages\seaborn\_decorators.py:46, i nl _deprecate_positional_args..inner_f(*args, **kwargs) 36 warnings.warn( 37 “pass the following variable{} as {}keyword arg{}: {}. “ 38 “From version @.12, the only valid positional argument " 43 Futurewarning a) 45 kwargs.update({k: arg for k, arg in zip(sig.parameters, args)}) ---> 46 return £(**kwargs) File C:\Programbata\anaconda3\1ib\site-packages\seaborn\axisgrid.py:1702, in JointGrid. init_(self, x, y, data, height, ratio, space, dropna, xlim, yli m, size, marginal_ticks, hue, palette, hue_order, hue_norm) 1699 ax_marg_y.xaxis.grid(False) 1761 # Process the input variables -> 1702 p = VectorPlotter(data-data, variables-dict(x-x, y-y, hue-hue)) 1763 plot_data = p.plot_data.loc[:, p.plot_data.notna().any()] 1765 # Possibly drop NA File C:\Programbata\Anaconda3\1ib\site-packages\seaborn\_core.py:605, in Vec torPlotter. init__(self, data, variables) 603 def _ init__(self, data-None, variables={}): --> 685 self.assign_variables(data, variables) 607 for var, cls in self._semantic_mappings.items(): 608 locathost 8888 /notebooks/Documents/Uniilad2ipynb2kernel_name=python’ ana nite, 1:01 PM 609 610 Untitled? - Jupyter Notebook # Create the mapping function map_func = partial(cls.map, plotte elf) File C:\Programbata\anaconda3\1ib\site-packages\seaborn\_core.py:668, in Vec torPlotter.assign_variables(self, data, variables) 678 672 self.plot_data 673 self.variables self. input_format = “long” plot_data, variables - self._assign_variables_longform( data, **variables, ) plot_data variables File C:\Programbata\anaconda3\1ib\site-packages\seaborn\_core.py:983, in Vec torPlotter._assign_variables longform(self, data, **kwargs) 898 elif isinstance(val, (str, bytes)): 899 900 902 => 903 905 906 907 908 909 910 ies): # This looks like a column name but we don't know what it means! err = f"Could not interpret value “{val}° for parameter ~{key} raise ValueError(err) else: # Otherwise, assume the value is itself data #t Raise when data object is present and a vector can't matched if isinstance(data, pd.DataFrame) and not isinstance(val, pd.Ser ValueError: Could not interpret value “sepal.length” for parameter ~x* 10 08 06 04 02 00 00 02 04 08 B10 locathost 8888 /notebooks/Documents/Uniilad2ipynb2kernel_name=python’ ana ‘W122, 1:01 PM Until - Jupyer Notebook In [17]: sns.boxplot(x = "variety", y = “petal.length", data = iris) out [47]: ], []], dtype=object) Boxplot grouped by variaty oot nath setae 8 . 
In [33]:

from pandas.plotting import andrews_curves
andrews_curves(iris, "variety")

Out[33]: <AxesSubplot:>

[figure: Andrews curves, one curve per sample, colored by variety (Setosa, Versicolor, Virginica)]

In [29]:

from pandas.plotting import parallel_coordinates
parallel_coordinates(iris, "variety")

Out[29]: <AxesSubplot:>

[figure: parallel-coordinates plot over sepal.length, sepal.width, petal.length and petal.width, colored by variety]

In [31]:

from pandas.plotting import radviz
radviz(iris, "variety")

Out[31]: <AxesSubplot:>

[figure: RadViz projection of the four features, colored by variety]

In [35]:

from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split

In [36]:

x = iris.iloc[:, :-1].values
y = iris.iloc[:, -1].values
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)

In [40]:

from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
# accuracy score
from sklearn.metrics import accuracy_score
print('accuracy is', accuracy_score(y_pred, y_test))

              precision    recall  f1-score   support

      Setosa       1.00      1.00      1.00        11
  Versicolor       1.00      1.00      1.00        13
   Virginica       1.00      1.00      1.00         6

    accuracy                           1.00        30
   macro avg       1.00      1.00      1.00        30
weighted avg       1.00      1.00      1.00        30

[[11  0  0]
 [ 0 13  0]
 [ 0  0  6]]
accuracy is 1.0

In [42]:

from sklearn.neighbors import KNeighborsClassifier
classifier = KNeighborsClassifier(n_neighbors=8)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
from sklearn.metrics import accuracy_score
print('accuracy is', accuracy_score(y_pred, y_test))

              precision    recall  f1-score   support

      Setosa       1.00      1.00      1.00        11
  Versicolor       1.00      1.00      1.00        13
   Virginica       1.00      1.00      1.00         6

    accuracy                           1.00        30
   macro avg       1.00      1.00      1.00        30
weighted avg       1.00      1.00      1.00        30

[[11  0  0]
 [ 0 13  0]
 [ 0  0  6]]
accuracy is 1.0

In [43]:

from sklearn.svm import SVC
classifier = SVC()
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
from sklearn.metrics import accuracy_score
print('accuracy is', accuracy_score(y_pred, y_test))

              precision    recall  f1-score   support

      Setosa       1.00      1.00      1.00        11
  Versicolor       1.00      1.00      1.00        13
   Virginica       1.00      1.00      1.00         6

    accuracy                           1.00        30
   macro avg       1.00      1.00      1.00        30
weighted avg       1.00      1.00      1.00        30

[[11  0  0]
 [ 0 13  0]
 [ 0  0  6]]
accuracy is 1.0
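Logistic regression, k-nearest neighbours and the SVC all reach 100% accuracy here, but the hold-out set has only 30 samples, so these scores say little on their own. A cross-validation sketch like the one below (not part of the original notebook; it reuses x and y from In [36]) gives a less split-dependent estimate:

# Sketch: 5-fold cross-validation on the full data as a sanity check on the
# perfect hold-out scores above. Assumes x and y from the cell that built the split.
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression

scores = cross_val_score(LogisticRegression(max_iter=200), x, y, cv=5)
print("fold accuracies:", scores)
print("mean accuracy:", scores.mean())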
In [45]:

from sklearn.naive_bayes import GaussianNB
classifier = GaussianNB()
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
from sklearn.metrics import accuracy_score
print('accuracy is', accuracy_score(y_pred, y_test))

              precision    recall  f1-score   support

      Setosa       1.00      1.00      1.00        11
  Versicolor       0.93      1.00      0.96        13
   Virginica       1.00      0.83      0.91         6

    accuracy                           0.97        30
   macro avg       0.98      0.94      0.96        30
weighted avg       0.97      0.97      0.97        30

[[11  0  0]
 [ 0 13  0]
 [ 0  1  5]]
accuracy is 0.9666666666666667

In [46]:

# Bernoulli Naive Bayes
from sklearn.naive_bayes import BernoulliNB
classfier = BernoulliNB()            # note the typo: this BernoulliNB instance is never used
classifier.fit(X_train, y_train)     # so this refits the GaussianNB from the previous cell,
y_pred = classifier.predict(X_test)  # which is why the results below repeat the GaussianNB ones
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
from sklearn.metrics import accuracy_score
print('accuracy is', accuracy_score(y_pred, y_test))

              precision    recall  f1-score   support

      Setosa       1.00      1.00      1.00        11
  Versicolor       0.93      1.00      0.96        13
   Virginica       1.00      0.83      0.91         6

    accuracy                           0.97        30
   macro avg       0.98      0.94      0.96        30
weighted avg       0.97      0.97      0.97        30

[[11  0  0]
 [ 0 13  0]
 [ 0  1  5]]
accuracy is 0.9666666666666667

In [51]:

from sklearn.metrics import accuracy_score, log_loss

classifiers = [
    GaussianNB(),
    BernoulliNB(),
]

log_cols = ["Classifier", "Accuracy"]
log = pd.DataFrame(columns=log_cols)

for clf in classifiers:
    clf.fit(X_train, y_train)
    name = clf.__class__.__name__

    print("=" * 30)
    print(name)
    print("****Results****")
    train_predictions = clf.predict(X_test)
    acc = accuracy_score(y_test, train_predictions)
    print("Accuracy: {:.4%}".format(acc))

    log_entry = pd.DataFrame([[name, acc * 100]], columns=log_cols)
    log = log.append(log_entry)

    print("=" * 30)

==============================
GaussianNB
****Results****
Accuracy: 96.6667%
==============================
==============================
BernoulliNB
****Results****
Accuracy: 20.0000%
==============================
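The comparison loop above only covers the two naive Bayes variants. The sketch below (not in the original notebook; the names results and summary are illustrative) runs every classifier used earlier through the same split and collects the scores in one table. Building the table in a single DataFrame call also sidesteps DataFrame.append, which newer pandas versions have removed.

# Sketch: compare all classifiers used above on the same hold-out split.
# Assumes X_train, X_test, y_train, y_test from the train_test_split cell.
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB, BernoulliNB
from sklearn.metrics import accuracy_score

classifiers = [
    LogisticRegression(max_iter=200),
    KNeighborsClassifier(n_neighbors=8),
    SVC(),
    GaussianNB(),
    BernoulliNB(),
]

results = []
for clf in classifiers:
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    results.append([clf.__class__.__name__, acc * 100])

summary = pd.DataFrame(results, columns=["Classifier", "Accuracy"])
print(summary.sort_values("Accuracy", ascending=False))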
