Professional Documents
Culture Documents
AUC =
0.7918
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
MATLAB Command Window Page 2
>>
>>
>>
>> % 2. Compare Classification Methods Using ROC Curve
% Compute the standard ROC curve using the probabilities for scores.
% NOTE(review): resp (labels), pred (predictors), score_log, score_svm,
% score_nb, and mdlNB are produced earlier in the session (not visible on
% this page) — presumably via logistic/naive Bayes fits and resubPredict;
% confirm against the preceding pages.
[Xlog,Ylog,Tlog,AUClog] = perfcurve(resp,score_log,'true');
% Train an SVM classifier on the same sample data. Standardize the data.
mdlSVM = fitcsvm(pred,resp,'Standardize',true);
% Compute the standard ROC curve using the scores from the SVM model.
% NOTE(review): indexing score_svm by mdlSVM.ClassNames assumes ClassNames
% is a logical vector selecting the positive-class score column (i.e. resp
% is logical) — confirm resp's type in the earlier pages.
[Xsvm,Ysvm,Tsvm,AUCsvm] = perfcurve(resp,score_svm(:,mdlSVM.ClassNames),'true');
% Compute the standard ROC curve using the scores from the naive Bayes classification.
[Xnb,Ynb,Tnb,AUCnb] = perfcurve(resp,score_nb(:,mdlNB.ClassNames),'true');
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>> % Compare the area under the curve for all three classifiers.
% No trailing semicolons, so each AUC value is echoed to the command
% window (the AUClog/AUCsvm/AUCnb values shown below).
AUClog
AUCsvm
AUCnb
AUClog =
0.9659
AUCsvm =
0.9489
AUCnb =
0.9393
>>
>> % 3. Determine the Parameter Value for Custom Kernel Function
% This example shows how to determine the better parameter value for a
% custom kernel function in a classifier using the ROC curves.
% NOTE(review): n, X1, scores1, and the custom kernel mysigmoid2 are
% defined earlier in the session (not visible on this page) — confirm there.
r2 = sqrt(rand(2*n,1));  % random radii for 2*n points
t2 = [pi/2*rand(n,1)+pi/2; (pi/2*rand(n,1)-pi/2)]; % Random angles for Q2 and Q4
X2 = [r2.*cos(t2) r2.*sin(t2)];  % polar -> Cartesian coordinates
% Define the predictor variables. Label points in the first and third
% quadrants as belonging to the positive class, and those in the second
% and fourth quadrants as belonging to the negative class.
pred = [X1; X2];
resp = ones(4*n,1);
resp(2*n + 1:end) = -1; % Labels: first 2*n points are +1, last 2*n are -1
% Train an SVM with the custom kernel on standardized predictors, then fit
% the score-to-posterior-probability transformation.
SVMModel2 = fitcsvm(pred,resp,'KernelFunction','mysigmoid2',...
'Standardize',true);
SVMModel2 = fitPosterior(SVMModel2);
[~,scores2] = resubPredict(SVMModel2);  % resubstitution posterior scores
% Compute the ROC curves and the area under the curve (AUC) for both models.
% NOTE(review): column 2 of scores1/scores2 is presumably the positive
% (+1) class column — confirm against the models' ClassNames ordering.
[x1,y1,~,auc1] = perfcurve(resp,scores1(:,2),1);
[x2,y2,~,auc2] = perfcurve(resp,scores2(:,2),1);
>>
>>
>>
>> auc1  % AUC for the first kernel parameter value (model fit earlier)
auc2     % AUC for the second kernel parameter value (SVMModel2)
auc1 =
0.9518
auc2 =
0.9985
>>
>>
>> % 4. Plot ROC Curve for Classification Tree
% Train a classification tree using the sepal length and width as the
% predictor variables. It is a good practice to specify the class names
% (and their order) when training a classifier.
Model = fitctree(meas(:,1:2),species, ...
'ClassNames',{'setosa','versicolor','virginica'});
% Predict the class labels and scores for the species based on the tree Model.
[~,score] = resubPredict(Model);
% NOTE(review): OPTROCPT, suby, and subnames are outputs of a perfcurve
% call that is not visible on this page (page break in the transcript) —
% confirm the call and its arguments against the full session.
OPTROCPT
suby
subnames
MATLAB Command Window Page 6
OPTROCPT =
0.1000 0.8000
suby =
0 0
0.1800 0.1800
0.4800 0.4800
0.5800 0.5800
0.6200 0.6200
0.8000 0.8000
0.8800 0.8800
0.9200 0.9200
0.9600 0.9600
0.9800 0.9800
1.0000 1.0000
1.0000 1.0000
subnames =
{'setosa'} {'virginica'}
>> % Plot the ROC curve and the optimal operating point on the ROC curve.
plot(X,Y)                           % ROC curve from the earlier perfcurve call
hold on                             % overlay the operating point on the same axes
plot(OPTROCPT(1),OPTROCPT(2),'ro')  % optimal operating point as a red circle
xlabel('False positive rate')
ylabel('True positive rate')
title('ROC Curve for Classification by Classification Trees')
hold off
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
MATLAB Command Window Page 7
>> % Find the threshold that corresponds to the optimal operating point.
% Logical indexing: pick the element of T where (X,Y) equals OPTROCPT.
T((X==OPTROCPT(1))&(Y==OPTROCPT(2)))
% Again, you must supply perfcurve with a function that factors in the
% scores of the negative class.
% Per the ClassNames order above ({'setosa','versicolor','virginica'}),
% columns 2 and 3 of score are versicolor and virginica respectively, so
% diffscore is the versicolor score relative to virginica.
diffscore = score(:,2) - score(:,3);
% Recompute the ROC with 'versicolor' as the positive class and
% 'virginica' explicitly set as the negative class.
[X,Y,~,~,OPTROCPT] = perfcurve(species,diffscore,'versicolor', ...
'negClass','virginica');
ans =
0.2857
OPTROCPT =
0.1800 0.8200
>>
>> % 5. Compute Pointwise Confidence Intervals for ROC Curve
>>
>>
>>
>>
>>
>>
>>
>>
>> % Specify the threshold values to fix and compute the ROC curve. Then plot the curve.
% NOTE(review): mdl is a model fit earlier in the session (not visible
% here); species(51:end,:) presumably restricts to the two non-setosa
% classes — confirm against the preceding pages.
% 'NBoot',1000 requests bootstrap pointwise confidence bounds, so X1 and
% Y1 each carry three columns: [value, lower bound, upper bound].
[X1,Y1,T1] = perfcurve(species(51:end,:),mdl.Fitted.Probability,...
'virginica','NBoot',1000,'TVals',0:0.05:1);
>> figure()
% Vertical error bars span the bootstrap lower/upper bounds at each point.
errorbar(X1(:,1),Y1(:,1),Y1(:,1)-Y1(:,2),Y1(:,3)-Y1(:,1));
xlim([-0.02,1.02]); ylim([-0.02,1.02]);  % small margins around the unit square
xlabel('False positive rate')
ylabel('True positive rate')
title('ROC Curve with Pointwise Confidence Bounds')
legend('PCBwTA','Location','Best')
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>