Professional Documents
Culture Documents
U4ECE22115
Experiment 1: Evaluate the performance of decision tree (DT) and Naive Bayes (NB) classifiers on the IRIS dataset, and note the accuracy of both classifiers using a 70-30% training-test split.
In [43]: iris.head()
Experiment 2: Compare the performance of both classifiers (DT and NB) using 10-fold, leave one out, and 10-fold stratified cross-validation.
# Names of the cross-validation schemes to be compared (Experiment 2).
cv_methods = ['10-fold', 'Leave One Out', 'Stratified 10-fold']
# The two candidate classifiers, with default hyperparameters.
classifiers = [DecisionTreeClassifier(), GaussianNB()]
# Map the string class labels in 'Species' to integer codes,
# stored in a new 'species_encoded' column.
le = LabelEncoder()
iris['species_encoded'] = le.fit_transform(iris['Species'])
# Linear-kernel SVM trained on the standardized feature matrix.
# (X_train_standardized / X_test_standardized / y_*_encoded and the
# *_no_* accuracy variables come from earlier cells, not shown here.)
svm_classifier_standardized = SVC(kernel='linear')
svm_classifier_standardized.fit(X_train_standardized, y_train_encoded)
y_pred_standardization = svm_classifier_standardized.predict(X_test_standardized)
accuracy_standardization = accuracy_score(y_test_encoded, y_pred_standardization)

# Report all four accuracy figures; labels and order match the
# original four print statements exactly.
for _label, _value in (
    ("Accuracy without Standardization:", accuracy_no_standardization),
    ("Accuracy with Standardization:", accuracy_standardization),
    ("Accuracy without Normalization:", accuracy_no_normalization),
    ("Accuracy with Normalization:", accuracy_normalization),
):
    print(_label, _value)
In [47]: iris.head()
Out[47]: Id SepalLengthCm SepalWidthCm PetalLengthCm PetalWidthCm Species species_encoded species_0 species_1 species_2
Experiment 5: Perform colour classification on the above dataset using the SVM, k-NN, DT, and NB classifiers.
# --- Train, predict, and score four classifiers on one split --------
# X_train / X_test / y_train / y_test are assumed to come from an
# earlier train_test_split cell (not shown here) — TODO confirm.

# Initialize classifiers (defaults except: linear kernel, k = 5)
svm_classifier = SVC(kernel='linear')
knn_classifier = KNeighborsClassifier(n_neighbors=5)
dt_classifier = DecisionTreeClassifier()
nb_classifier = GaussianNB()

# Fit every model and collect its test-set predictions in one pass,
# replacing the four copy-pasted fit/predict stanzas.
_predictions = {}
for _key, _clf in [
    ("svm", svm_classifier),
    ("knn", knn_classifier),
    ("dt", dt_classifier),
    ("nb", nb_classifier),
]:
    _clf.fit(X_train, y_train)
    _predictions[_key] = _clf.predict(X_test)

# Keep the original per-model names so any later cells still work.
y_pred_svm = _predictions["svm"]
y_pred_knn = _predictions["knn"]
y_pred_dt = _predictions["dt"]
y_pred_nb = _predictions["nb"]

# Calculate accuracies (fraction of test samples predicted correctly)
accuracy_svm = accuracy_score(y_test, y_pred_svm)
accuracy_knn = accuracy_score(y_test, y_pred_knn)
accuracy_dt = accuracy_score(y_test, y_pred_dt)
accuracy_nb = accuracy_score(y_test, y_pred_nb)

# Print accuracies (labels and order identical to the original)
print("SVM Accuracy:", accuracy_svm)
print("k-NN Accuracy:", accuracy_knn)
print("Decision Tree Accuracy:", accuracy_dt)
print("Naive Bayes Accuracy:", accuracy_nb)
# Second run of the same four-classifier comparison on the split.

# Initialize classifiers
svm_classifier = SVC(kernel='linear')
knn_classifier = KNeighborsClassifier(n_neighbors=5)
dt_classifier = DecisionTreeClassifier()
nb_classifier = GaussianNB()

# Train classifiers — equivalent to four individual .fit calls
for _clf in (svm_classifier, knn_classifier, dt_classifier, nb_classifier):
    _clf.fit(X_train, y_train)

# Predictions on the held-out test set
y_pred_svm = svm_classifier.predict(X_test)
y_pred_knn = knn_classifier.predict(X_test)
y_pred_dt = dt_classifier.predict(X_test)
y_pred_nb = nb_classifier.predict(X_test)

# Calculate accuracies
accuracy_svm = accuracy_score(y_test, y_pred_svm)
accuracy_knn = accuracy_score(y_test, y_pred_knn)
accuracy_dt = accuracy_score(y_test, y_pred_dt)
accuracy_nb = accuracy_score(y_test, y_pred_nb)

# Print accuracies — same labels and order as before
for _label, _acc in zip(
    ("SVM Accuracy:", "k-NN Accuracy:", "Decision Tree Accuracy:", "Naive Bayes Accuracy:"),
    (accuracy_svm, accuracy_knn, accuracy_dt, accuracy_nb),
):
    print(_label, _acc)
In [ ]: