function [alpha, trainY] = dualPLS(K,Y,T,str)
% PLS Regression & Classification algorithm
%
% Usage: [alpha, trainY] = dualPLS(K,Y,T,str)
%
% Inputs:
% K is an (l x l) kernel matrix
% Y is an (l x m) matrix containing the corresponding output vectors
% T gives the number of iterations to be performed
% str - selects whether to return the feature directions or the regression coefficients
% input = 'fs' | 'reg'
%
% Outputs:
% alpha contains the regression coefficients (or, with 'fs', the feature directions)
% trainY contains the fitted training outputs
%
% David R. Hardoon - drh@ecs.soton.ac.uk
% Created - 10/02/04
%
% 31/01/05 - Added some possible bug handling
%
% No commercial use.
% Any modification, please email me a copy.
if (nargin < 4)
    alpha = 0;
    trainY = 0;
    disp('Please define all inputs');
    help dualPLS;
    return;
end
disp('.... Initialising Data');
% Craig note -- the following (commented-out) line should not really be there;
% it centres the data in primal space, but it has no effect here since we
% re-centre the kernel matrix later.
%mu = mean(K)
l = size(K,1);            % number of training examples
KK = K;                   % working copy of the kernel matrix (deflated each iteration)
YY = Y;                   % working copy of the outputs (deflated each iteration)
trainY = zeros(size(Y));  % accumulated fitted training outputs
disp('.... Initialisation Completed - Entering Algorithm');
for i=1:T
    YYK = YY*YY'*KK;                     % deflated Y*Y'*K used in the power iteration
    beta(:,i) = YY(:,1)/norm(YY(:,1));   % initial dual direction
    if size(YY,2) > 1  % only iterate if the output dimension is greater than 1
        bold = beta(:,i) + 1;
        count = 0;
        while ((norm(beta(:,i) - bold) > 0.001) && (count < 50))
            count = count + 1;
            bold = beta(:,i);
            tbeta = YYK*bold;
            beta(:,i) = tbeta/norm(tbeta);
        end
    end
    tau(:,i) = KK*beta(:,i);                % dual score vector
    val = norm(tau(:,i))^2;
    c(:,i) = YY'*tau(:,i)/val;              % output loading
    trainY = trainY + tau(:,i)*c(:,i)';     % accumulate the fitted training outputs
    trainerror = norm(Y - trainY,'fro')/sqrt(l);  % running training error (not returned)
    w = KK*tau(:,i)/val;
    % Deflate the kernel matrix and the outputs with respect to tau
    KK = KK - tau(:,i)*w' - w*tau(:,i)' + tau(:,i)*tau(:,i)'*(tau(:,i)'*w)/val;
    YY = YY - tau(:,i)*c(:,i)';
end
if strcmp(str,'fs')
    disp('.... Computing Features');
    alpha = beta*((tau'*K*beta)\diag(diag(tau'*tau)));
elseif strcmp(str,'reg')
    disp('.... Computing Regression Values');
    alpha = beta*((tau'*K*beta)\(tau'*Y));
else
    error('str must be either ''fs'' or ''reg''');
end
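
% ------------------------------------------------------------------------
% Example usage -- a minimal illustrative sketch. The random data, the
% linear kernel and the test-prediction step (Ktest*alpha, the usual dual
% PLS prediction) are assumptions for illustration only.
%
%   X = randn(100,10);                         % 100 training samples, 10 features
%   Y = randn(100,2);                          % corresponding 2-dimensional outputs
%   K = X*X';                                  % (l x l) linear kernel matrix
%   [alpha, trainY] = dualPLS(K, Y, 5, 'reg'); % 5 PLS iterations, regression mode
%
%   Xtest = randn(20,10);                      % new data
%   Ktest = Xtest*X';                          % kernel between test and training points
%   Ypred = Ktest*alpha;                       % predicted outputs for the new data
% ------------------------------------------------------------------------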