% use the built-in cvpartition function
samplesize = size(matrix, 1);
c = cvpartition(samplesize, 'kfold', k); % returns the partition indices for each fold

% ///// output in the MATLAB console (example: N = 10 samples, k = 4 folds)
% K-fold cross validation partition
% N: 10
% NumTestSets: 4
% TrainSize: 8 7 7 8
% TestSize: 2 3 3 2
% //////////////////////

for i = 1:k
    trainIdxs = find(training(c,i)); % training(c,i): 1 means in train, 0 means in test
    testIdxs  = find(test(c,i));     % test(c,i): 1 means in test, 0 means in train
    trainMatrix = matrix(trainIdxs, :);
    testMatrix  = matrix(testIdxs, :);
end
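The performance code below uses per-fold confusion counts cvtp, cvfn, cvfp and cvtn (true positives, false negatives, false positives and true negatives in each test fold), which are never defined in this post. As a minimal sketch, assuming a binary label vector labels (1 = positive, 0 = negative) aligned with the rows of matrix and a hypothetical classifier trainAndPredict that you would replace with your own model, they could be filled like this:

cvtp = zeros(1,k); cvfn = zeros(1,k); cvfp = zeros(1,k); cvtn = zeros(1,k);
for i = 1:k
    trIdx = training(c,i);
    teIdx = test(c,i);
    % trainAndPredict is a hypothetical placeholder for your own classifier
    predicted = trainAndPredict(matrix(trIdx,:), labels(trIdx), matrix(teIdx,:));
    actual = labels(teIdx);
    cvtp(i) = sum(predicted == 1 & actual == 1); % true positives in fold i
    cvfn(i) = sum(predicted == 0 & actual == 1); % false negatives in fold i
    cvfp(i) = sum(predicted == 1 & actual == 0); % false positives in fold i
    cvtn(i) = sum(predicted == 0 & actual == 0); % true negatives in fold i
end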
%% calculate performance of a partition
% cvtp, cvfn, cvfp, cvtn: per-fold true positive, false negative, false positive
% and true negative counts; P and N: total number of positive and negative samples;
% kfold: number of folds
selectedKfoldSen = []; selectedKfoldSpe = []; selectedKfoldAcc = [];
indexSen = 1; indexSpe = 1; indexAcc = 1;
if ( kfold == (P+N) ) % leave-one-out: pool the counts over all folds
    sensitivity = sum(cvtp) / ( sum(cvtp) + sum(cvfn) )
    specificity = sum(cvtn) / ( sum(cvfp) + sum(cvtn) )
    accuracy = ( sum(cvtp) + sum(cvtn) ) / ( sum(cvtp) + sum(cvfn) + sum(cvfp) + sum(cvtn) )
else
    sensitivity = []; specificity = []; accuracy = [];
    for i = 1:kfold
        if ( cvtp(i) + cvfn(i) ) == 0 % no POSITIVE sample fell into this test fold
            % sensitivity(i) = 1;
        else
            sensitivity(indexSen) = cvtp(i) / ( cvtp(i) + cvfn(i) );
            indexSen = indexSen + 1;
            selectedKfoldSen = [selectedKfoldSen i];
        end
        if ( cvfp(i) + cvtn(i) ) == 0 % no NEGATIVE sample fell into this test fold
            % specificity(i) = 1;
        else
            specificity(indexSpe) = cvtn(i) / ( cvfp(i) + cvtn(i) );
            indexSpe = indexSpe + 1;
            selectedKfoldSpe = [selectedKfoldSpe i];
        end
        accuracy(i) = ( cvtp(i) + cvtn(i) ) / ( cvtp(i) + cvfn(i) + cvfp(i) + cvtn(i) );
    end
    sen = mean(sensitivity)
    spe = mean(specificity)
    acc = mean(accuracy)
end
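As a quick sanity check on the formulas, here is one hypothetical fold (the counts are made up purely for illustration):

% hypothetical counts for a single fold, only to illustrate the formulas above
tp = 2; fn = 1; fp = 1; tn = 6;
sens = tp / (tp + fn)                   % 2/3  = 0.667
spec = tn / (fp + tn)                   % 6/7  = 0.857
acc  = (tp + tn) / (tp + fn + fp + tn)  % 8/10 = 0.8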
dlmwrite('cv', cvtp', '-append', 'delimiter', '\t');
dlmwrite('cv', cvfn', '-append', 'delimiter', '\t');
dlmwrite('cv', cvfp', '-append', 'delimiter', '\t');
dlmwrite('cv', cvtn', '-append', 'delimiter', '\t');
dlmwrite('cv', selectedKfoldSen, '-append', 'delimiter', '\t');
dlmwrite('cv', selectedKfoldSpe, '-append', 'delimiter', '\t');
dlmwrite('cv', sensitivity, '-append', 'delimiter', '\t');
dlmwrite('cv', specificity, '-append', 'delimiter', '\t');
dlmwrite('cv', accuracy, '-append', 'delimiter', '\t');
This code is for k-fold cross validation? If I want to apply it to a neural network, specifically an MLP, in which part of the code should I add it?
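A rough sketch of one way to do it, assuming the Neural Network / Deep Learning Toolbox and a binary label vector labels aligned with the rows of matrix: the MLP is trained and tested inside the fold loop, and its predictions are what fill the per-fold counts used above.

for i = 1:k
    Xtr = matrix(training(c,i), :)';            % patternnet expects samples as columns
    Xte = matrix(test(c,i), :)';
    net = patternnet(10);                       % MLP with one hidden layer of 10 units
    net = train(net, Xtr, labels(training(c,i))');
    predicted = net(Xte) > 0.5;                 % threshold the network output
    % compare predicted with labels(test(c,i))' to fill cvtp(i), cvfn(i), cvfp(i), cvtn(i)
end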
Hello, in case my data set consists of n-dimensional column vectors, what command should I use to partition my data? I tried "crossvalind" to generate indices for each column vector, but I get an error.
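One possible approach, sketched under the assumption that the observations are stored one per column in a matrix data: partition over the number of columns and index columns instead of rows.

nObs = size(data, 2);                   % number of column-vector observations
c = cvpartition(nObs, 'kfold', k);
for i = 1:k
    trainData = data(:, training(c,i)); % columns in the training folds
    testData  = data(:, test(c,i));     % columns in the test fold
end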
What are the definitions of "cvtp" and "cvfn"? I cannot follow the code when I reach those two parameters.
Undefined variables cvfn, cvtp, cvfp, and cvtn!
Your code has a mistake; the correct code is:
ReplyDeletesamplesize = size( matrix , 1);
c = cvpartition(samplesize, 'kfold' , k); % return the indexes on each fold
% ///// output in matlab console
% K-fold cross validation partition
% N: 10
% NumTestSets: 4
% TrainSize: 8 7 7 8
% TestSize: 2 3 3 2
% //////////////////////
for i = 1:k
    trainIdxs = find(training(c,i)); % training(c,i): 1 means in train, 0 means in test
    testIdxs  = find(test(c,i));     % test(c,i): 1 means in test, 0 means in train
    trainMatrix = matrix(trainIdxs, :);
    testMatrix  = matrix(testIdxs, :);
end