You are on page 1 of 3

CONTOH IMPLEMENTASI ADABOOST DENGAN R

library(adabag)

## Warning: package 'adabag' was built under R version 4.1.3

## Loading required package: rpart

## Loading required package: caret

## Warning: package 'caret' was built under R version 4.1.3

## Loading required package: ggplot2

## Warning: package 'ggplot2' was built under R version 4.1.3

## Loading required package: lattice

## Loading required package: foreach

## Warning: package 'foreach' was built under R version 4.1.3

## Loading required package: doParallel

## Warning: package 'doParallel' was built under R version 4.1.3

## Loading required package: iterators

## Warning: package 'iterators' was built under R version 4.1.3

## Loading required package: parallel

library(caret)
library(car)

## Warning: package 'car' was built under R version 4.1.3

## Loading required package: carData

## Warning: package 'carData' was built under R version 4.1.3

##Contoh Code Di R
#menggunakan data Iris

# Load the built-in iris data set as the working copy for this example
data1 <- datasets::iris
# Preview the first six observations
head(data1)

## Sepal.Length Sepal.Width Petal.Length Petal.Width Species


## 1 5.1 3.5 1.4 0.2 setosa
## 2 4.9 3.0 1.4 0.2 setosa
## 3 4.7 3.2 1.3 0.2 setosa
## 4 4.6 3.1 1.5 0.2 setosa
## 5 5.0 3.6 1.4 0.2 setosa
## 6 5.4 3.9 1.7 0.4 setosa

# Randomly partition the data: 90% training data, 10% test data.
# (The original comment line was split by PDF extraction, leaving the
# bare text "test 10%" outside a comment — a syntax error if sourced.)
# createDataPartition samples within each Species level, so the class
# proportions are preserved in both partitions.
parts <- createDataPartition(data1$Species, p = 0.9, list = FALSE)
train <- data1[parts, ]
test  <- data1[-parts, ]

# Fit an AdaBoost ensemble on the training data:
# boos = TRUE draws a bootstrap sample weighted by observation weights
# in each iteration; mfinal = 2 grows an ensemble of two trees.
model_adaboost <- boosting(
  formula = Species ~ .,
  data    = train,
  boos    = TRUE,
  mfinal  = 2
)
# Inspect the individual rpart trees that make up the ensemble
model_adaboost$trees

## [[1]]
## n= 135
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 135 84 setosa (0.37777778 0.28148148 0.34074074)
## 2) Petal.Length< 2.35 51 0 setosa (1.00000000 0.00000000 0.00000000) *
## 3) Petal.Length>=2.35 84 38 virginica (0.00000000 0.45238095 0.54761905)

## 6) Petal.Width< 1.7 41 3 versicolor (0.00000000 0.92682927 0.07317073) *
## 7) Petal.Width>=1.7 43 0 virginica (0.00000000 0.00000000 1.00000000) *
##
## [[2]]
## n= 135
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 135 80 virginica (0.29629630 0.29629630 0.40740741)
## 2) Petal.Length< 2.35 40 0 setosa (1.00000000 0.00000000 0.00000000) *
## 3) Petal.Length>=2.35 95 40 virginica (0.00000000 0.42105263 0.57894737)
## 6) Petal.Length< 4.85 47 7 versicolor (0.00000000 0.85106383 0.14893617)
## 12) Sepal.Length>=4.95 39 1 versicolor (0.00000000 0.97435897 0.02564103) *
## 13) Sepal.Length< 4.95 8 2 virginica (0.00000000 0.25000000 0.75000000) *
## 7) Petal.Length>=4.85 48 0 virginica (0.00000000 0.00000000 1.00000000) *

# Voting weight assigned to each tree in the final ensemble
model_adaboost[["weights"]]

## [1] 1.534026 1.531765


## Accuracy
# Use the fitted model to predict the held-out test data.
# Fixed: assignment with `=` replaced by the idiomatic `<-`.
pred_test <- predict(model_adaboost, test)
# Confusion matrix: predicted class vs. observed class on the test set
pred_test$confusion

## Observed Class
## Predicted Class setosa versicolor virginica
## setosa 5 0 0
## versicolor 0 5 0
## virginica 0 0 5

# Misclassification rate on the test set (0 = perfect classification)
pred_test[["error"]]

## [1] 0

# Estimate generalization error with v = 10-fold cross-validation
# over the full data set, using the same boosting settings as above
cvmodel <- boosting.cv(
  formula = Species ~ .,
  data    = data1,
  boos    = TRUE,
  mfinal  = 2,
  v       = 10
)

## i: 1 Sun Sep 18 22:42:07 2022


## i: 2 Sun Sep 18 22:42:07 2022
## i: 3 Sun Sep 18 22:42:07 2022
## i: 4 Sun Sep 18 22:42:07 2022
## i: 5 Sun Sep 18 22:42:07 2022
## i: 6 Sun Sep 18 22:42:07 2022
## i: 7 Sun Sep 18 22:42:07 2022
## i: 8 Sun Sep 18 22:42:07 2022
## i: 9 Sun Sep 18 22:42:07 2022
## i: 10 Sun Sep 18 22:42:08 2022

# Print every component of the CV result except the first one
# (presumably the per-observation class predictions — confirm against
# ?boosting.cv), leaving the confusion matrix and the CV error estimate
print(cvmodel[-1])

## $confusion
## Observed Class
## Predicted Class setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 46 6
## virginica 0 4 44
##
## $error
## [1] 0.06666667

Sumber :
https://www.datatechnotes.com/2018/03/classification-with-adaboost-model-in-r.html

You might also like