
MATLAB ANN (artificial neural network): train and test


sample1: 1 2 3 4  label: A
sample2: 1 5 7 7  label: B

In MATLAB's Neural Network Toolbox, every sample must be placed in one column:
=============================
featureIn
1 1
2 5
3 7
4 7
featureOut
A
B
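
As a minimal sketch (assuming the labels are encoded numerically, e.g. A = 1 and B = 2, since the toolbox trains on numeric targets), the two matrices above can be written in MATLAB as:

featureIn  = [1 1;
              2 5;
              3 7;
              4 7];    % 4 features x 2 samples: one sample per column
featureOut = [1 2];    % one numeric label per column (A -> 1, B -> 2 is an assumed encoding)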

function [yPredict] = doBP(trainFeature,trainValue)

trainFeature = trainFeature'; % transpose so each sample is one column (MATLAB Neural Network Toolbox format)
trainValue = trainValue';     % transpose the targets the same way



% version R2010a
net = newff(trainFeature, trainValue, [13 1], {'tansig' 'purelin'}); % tansig hidden layer, purelin output

% version R2009a
% net=newff(trainFeature,trainValue,[13 1]);
% net.layers{1}.transferFcn = 'tansig';
% net.layers{2}.transferFcn = 'purelin';



net = init(net); % reinitialize weights and biases

net.trainParam.epochs = 99999999;  % maximum number of training epochs
net.trainParam.goal = 0.0000001;   % stop training once this error goal is reached
net.trainParam.lr = 0.000001;      % learning rate (not used by trainlm) [default 0.01]
% net.trainParam.lr_dec = 0.000001;
% net.trainParam.mc = 0.9;
% net.trainParam.min_grad = 1e-10;
net.trainParam.show = 1;      % epochs between progress displays [default 25]
net.trainParam.time = 100000; % maximum training time in seconds [default inf]
net.trainFcn = 'trainlm';     % training function: trainlm (alternatives: trainrp, trainbfg)

net.divideParam.trainRatio = 80/100;  % Adjust as desired
net.divideParam.valRatio = 20/100;  % Adjust as desired
net.divideParam.testRatio = 0/100;  % Adjust as desired


% TRAIN
[net, tr, yPredict, E, Pf, Af] = train(net, trainFeature, trainValue); % yPredict holds the network outputs on the training set
plotperf(tr); % plot the training performance record

save net net; % save the trained network (net) to net.mat

end
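
A usage sketch for doBP (hedged: the sample values and the A = 1, B = 2 label encoding are assumptions; the samples are passed one per row because doBP transposes its arguments):

trainFeature = [1 2 3 4;    % sample1
                1 5 7 7];   % sample2 (one sample per row; doBP transposes internally)
trainValue   = [1; 2];      % assumed numeric encoding: A -> 1, B -> 2

yPredict = doBP(trainFeature, trainValue);  % trains the network and saves it to net.mat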

function [predictedY] = doTesting(testFeatureIn)

      testFeatureIn = testFeatureIn';  % transpose so each sample is one column (MATLAB format)
      load net;                        % retrieve the trained network (net) from net.mat into the workspace

      predictedY = sim(net, testFeatureIn); % simulate the network on the test features
end
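
A matching test sketch (the test values are hypothetical; doTesting loads the net.mat file saved by doBP):

testFeatureIn = [2 3 4 5];              % one new sample per row (hypothetical values)
predictedY = doTesting(testFeatureIn);  % network output for the test sample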

