📄 oilpipe_demo.m
字号:
% Demo program for 3 classes
% ==========================
% Matlab code for Gaussian Processes for Classification:
% GPCLASS version 0.2 10 Nov 97
% Copyright (c) David Barber and Christopher K I Williams (1997)
% This program is free software; you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation; either version 2 of the License, or
% any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with this program; if not, write to the Free Software
% Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

clear all; close all; st = fclose('all');

GPCPATH  = ['']; % this is where the GP code sits
DATAPATH = ['']; % this is the data directory
path(GPCPATH,path) % set up the path for GP routines

% Set up the data, including a possible normalisation/scaling
data = 'oilpipe';
filename = [DATAPATH,data,'.dat']; % this is where the data is
col = [1 0 1 0 1 0 1 0 1 0 1 0 0 0 2 2 2 0 0 ]; % which columns are attributes
% 1 indicates that the attribute is to be included
% 0 indicates that the attribute is not to be included
% 2 indicates that this attribute is a class label
% ( note: if there is more than one 2 in col, then
%   it is assumed that the class labels are in
%   the one-of-m class format (eg 0 1 0).
%   A single 2 denotes that the class labels
%   are integers (eg 2). )

rows_tr = [1:40];   % rows of Dataset used for training
rows_te = [41:100]; % rows of Dataset used for testing

% split the data into training and test parts
[in_all, out_all] = getmclasses(filename,col); % get dataset
x_tr_un = in_all(:,rows_tr); out_tr = out_all(:,rows_tr);
x_te_un = in_all(:,rows_te); out_te = out_all(:,rows_te);
out_trte = [out_tr out_te];

% do some scaling: inputs zero median, unit absolute deviation from median
% (test inputs are scaled with the statistics of the TRAINING set only)
med_xtr = median(x_tr_un');
x_tr = x_tr_un - med_xtr'*ones(1,size(x_tr_un,2));
mean_abs_dev = mean(abs(x_tr'));
x_tr = (1./mean_abs_dev'*ones(1,size(x_tr_un,2))).*x_tr;
x_te = x_te_un - med_xtr'*ones(1,size(x_te_un,2));
x_te = (1./mean_abs_dev'*ones(1,size(x_te_un,2))).*x_te;

outfile = ['oil_results']; % results filename prefix
meth = 'ml_hmc'; % use MAP as initial w for HMC
% other options are meth = 'ml' or meth = 'hmc'
npc = length(find(col==1)) + 2; % number of parameters per class
rand('state',0); % set the seed
randn('state',0);
m = 3; % number of classes
w = rand(1,m*npc); % initial parameters
parvec = pak(m, length(rows_tr), length(rows_te)); % a vector of useful parameters

% MAP hyperparameter SCG search
options = zeros(1,18); % Default options vector.
options(1) = 1;   % Display error values
options(14) = 10; % Number of iterations
options(9) = 0;   % 1 => do a gradient check

% HMC options
hmcopt(1) = 10;  % number of retained samples
hmcopt(2) = 10;  % trajectory length
hmcopt(3) = 5;   % burn in
hmcopt(4) = 0.2; % step size

% Set up the Gaussian hyperprior distributions for the parameters.
% For M independent classes, there are M different sets
% of covariance parameters to specify.
% For each class, the first component is the scale
% and the last is the bias.

% scale and bias:
for ci = 1:m
  mean_prior(ci,1) = -3;  % mean scale
  var_prior(ci,1) = 9;    % variance of scale
  mean_prior(ci,npc) = -3; % mean bias
  var_prior(ci,npc) = 9;   % variance of bias
end
% input attribute hyperparameters:
for ci = 1:m
  mean_prior(ci,2:npc-1) = -3.*ones(1,npc-2);
  var_prior(ci,2:npc-1) = 9.*ones(1,npc-2);
end
jitter = 0.01; % stabilization of covariance

% run the training (writes results to files with prefix `outfile`)
driverm(data,col,outfile,DATAPATH,x_tr,out_tr,x_te,out_te,meth,...
        options,w,hmcopt,mean_prior,var_prior,jitter)

% Prediction options when using the hyperparameter sample(s)
reject = 0;  % number of hyperparameter samples rejected when predicting
gsmp = 100;  % number of activation samples in softmax posterior average
[m, ntr, nte, ntrte] = unpak(parvec);
[ty_all,tru_all] = max(out_trte); % correct predictions
tru = tru_all(1,ntr+1:ntrte);

% MAP:
[meanpred_all] = final_pred([outfile,'.ml'], reject, gsmp, parvec);
[py_all,pred_all] = max(meanpred_all'); % GP predictions
pred = pred_all(1,ntr+1:ntrte);
fprintf(1,'\n\n\nMAP Results\n')
correct_pred = find(pred-tru==0);
wrong_pred = find(pred-tru);
fprintf(1,'test error rate = %f percent\n',100*length(wrong_pred)/nte)

% ARD
fprintf(1,'\nMAP hyperparameters:')
hyp_vec_all = getmat([outfile,'.ml.smp'],m*npc,0);
hyp_mat_all = vec2mitheta(hyp_vec_all,m);
fprintf(1,'\n class1 class2 class3\n')
hyp_mat_all(:,2:npc-1)'
fprintf(1,'\n covariance scale:\n')
hyp_mat_all(:,1)'
fprintf(1,'\n covariance bias:\n')
hyp_mat_all(:,npc)'

% HMC:
[meanpred_all] = final_pred([outfile,'.vhmc'], reject, gsmp, parvec);
[py_all,pred_all] = max(meanpred_all'); % threshold the predictions
pred = pred_all(1,ntr+1:ntrte);
fprintf(1,'\nHMC Results\n')
correct_pred = find(pred-tru==0);
wrong_pred = find(pred-tru);
fprintf(1,'test error rate = %f percent\n',100*length(wrong_pred)/nte)

% ARD
% unpack the hmcopt(1) retained hyperparameter samples, one per row
hyp_vec_all = getmat([outfile,'.vhmc.smp'],m*npc*hmcopt(1),0);
hyp_mat_all = zeros(hmcopt(1),m,npc);
for i = 1:hmcopt(1)
  hyp_mat_all(i,:,:) = vec2mitheta(hyp_vec_all(1,1+(i-1)*m*npc:i*m*npc),m);
end
fprintf(1,'\nmean hyperparameters:')
fprintf(1,'\n class1 class2 class3\n')
temp = squeeze(mean(hyp_mat_all));
temp(:,2:npc-1)'
fprintf(1,'\n covariance scale:\n')
temp(:,1)'
fprintf(1,'\n covariance bias:\n')
temp(:,npc)'
fprintf(1,'Standard deviation of the hyperparameters:')
fprintf(1,'\n class1 class2 class3\n')
temp2 = squeeze(std(hyp_mat_all));
temp2(:,2:npc-1)'
fprintf(1,'\n covariance scale:\n')
temp2(:,1)'
fprintf(1,'\n covariance bias:\n')
temp2(:,npc)'
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -