
Gaussian Process (GP)



https://www.youtube.com/watch?v=16oPvgOd3UI

function GP_1d

kernel = 5; % select one of the kernels below
switch kernel
    case 1; k = @(x,y) 1*x'*y;                       % linear
    case 2; k = @(x,y) 1*min(x,y);                   % Brownian motion
    case 3; k = @(x,y) exp(-100*(x-y)'*(x-y));       % squared exponential
    case 4; k = @(x,y) exp(-1*sqrt((x-y)'*(x-y)));   % Ornstein-Uhlenbeck
    case 5; k = @(x,y) exp(-1*sin(5*pi*(x-y))^2);    % periodic
end

% choose points at which to sample
x = (0:.005:1);
n = length(x);


% covariance matrix
C = zeros(n,n);
for i=1:n
   for j=1:n
      C(i,j) = k (x(i), x(j)) ;
   end
end

% sample from the Gaussian process at these points:
% C is symmetric positive semi-definite, so its SVD gives C = A*S*A';
% z = A*sqrt(S)*u then has covariance A*sqrt(S)*sqrt(S)*A' = C, i.e. z ~ N(0, C)
u = randn(n,1);
[A, S, ~] = svd(C);
z = A*sqrt(S)*u;


% plot
figure(2); hold on;
plot(x, z, '.-');
axis([0, 1, -2, 2]);



end
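As a side note, the SVD-based sampling inside GP_1d could equivalently use a Cholesky factorization. The snippet below is only a sketch, assuming C, n, and u are already in scope as in the function above; the 1e-10 jitter on the diagonal is an assumption to keep chol happy when C is merely positive semi-definite.

% alternative sampling sketch (assumption: a 1e-10 jitter makes C numerically
% positive definite, which chol requires)
L = chol(C + 1e-10*eye(n), 'lower');   % lower-triangular L with L*L' reproducing C (up to the jitter)
z = L*u;                               % z ~ N(0, C), same distribution as the SVD route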

============ IN 2D ========

function GP_2d

kernel = 3; % select one of the kernels below
% note: cases 2 and 5 are written for scalar inputs; with 2-D points use case 1, 3, or 4
switch kernel
    case 1; k = @(x,y) 1*x'*y;                       % linear
    case 2; k = @(x,y) 1*min(x,y);                   % Brownian motion
    case 3; k = @(x,y) exp(-100*(x-y)'*(x-y));       % squared exponential
    case 4; k = @(x,y) exp(-1*sqrt((x-y)'*(x-y)));   % Ornstein-Uhlenbeck
    case 5; k = @(x,y) exp(-1*sin(5*pi*(x-y))^2);    % periodic
end

% choose points at which to sample
points = (0:0.05:1)';
[U,V] = meshgrid(points,points);
x = [U(:) V(:)]';
n = size(x,2);


% covariance matrix
C = zeros(n,n);
for i=1:n 
   for j=1:n
      C(i,j) = k (x(:,i), x(:,j)) ;
   end
end

% sample from the Gaussian process at these points:
% C is symmetric positive semi-definite, so its SVD gives C = A*S*A';
% z = A*sqrt(S)*u then has covariance A*sqrt(S)*sqrt(S)*A' = C, i.e. z ~ N(0, C)
u = randn(n,1);
[A, S, ~] = svd(C);
z = A*sqrt(S)*u;


% plot
figure(2); clf;
Z = reshape(z,sqrt(n) , sqrt(n));
surf(U,V,Z);


end
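For a finer grid the n-by-n double loop in GP_2d gets slow. As a sketch, assuming the squared-exponential kernel (case 3) and the 2-by-n matrix x built inside the function, the covariance matrix can be formed in one shot from pairwise squared distances; this is a vectorized replacement for the loop, not part of the original code.

% vectorized covariance sketch (assumption: kernel = 3, x is 2-by-n as above)
sq = sum(x.^2, 1);                        % squared norm of every sample point
D2 = bsxfun(@plus, sq', sq) - 2*(x'*x);   % D2(i,j) = ||x(:,i) - x(:,j)||^2
C  = exp(-100 * max(D2, 0));              % clamp round-off negatives, apply the kernel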
