📄 forwardselect.m
function [subset, H, l, U, A, w, P] = forwardSelect(F, Y, options)
% [subset, H, l, U, A, w, P] = forwardSelect(F, Y, options)
%
% Regularised orthogonal least squares algorithm.
% See "Regularisation in the Selection of Radial Basis
% Function Centres", 1995, Orr, M.J.L., Neural Computation,
% 7(3):606-623.
%
% Inputs
%
%   F         Design matrix of selectable centres (p-by-M)
%   Y         Output training data (p-by-n)
%   options   Control options (string)
%
% Output
%
%   subset    Indices of selected columns of F (1-by-m)
%   H         Subset of F (p-by-m)
%   l         Regularisation parameter (real and non-negative)
%   U         The upper triangular transform (m-by-m)
%   A         inv(H'*H + l * U' * U) (m-by-m)
%   w         A * H' * Y (m-by-n)
%   P         I - H * A * H' (p-by-p)

% Preliminaries.
[ph,M] = size(F);
[py,n] = size(Y);
if py ~= ph
  error('forwardSelect: design and outputs have incompatible dimensions')
end
p = py;

% Default options.
Verbose = 0;
Flops = 0;
Global = 0;
Term = 'g';
MaxAge = 2;
MaxReg = 0;
ReEst = 'n';
Threshold = 0.9;
lambda = 0.1;
OLS = 0;

% Process options.
if nargin > 2

  % initialise
  i = 1;
  [arg, i] = getNextArg(options, i);

  % scan through arguments
  while ~isempty(arg)

    if strcmp(arg, '-v')

      % verbose output required
      Verbose = 1;

    elseif strcmp(arg, '-V')

      % verbose output required, with compute cost reporting
      Verbose = 1;
      Flops = 1;

    elseif strcmp(arg, '-t')

      % specify termination criterion: the method must be specified
      [arg, i] = getNextArg(options, i);
      method_given = 1;
      if strcmp(lower(arg), 'var')
        % use fraction of explained variance to terminate
        Term = 'e';
      elseif strcmp(lower(arg), 'uev')
        % use UEV (unbiased expected variance) to terminate
        Term = 'u';
      elseif strcmp(lower(arg), 'fpe')
        % use FPE (final prediction error) to terminate
        Term = 'f';
      elseif strcmp(lower(arg), 'gcv')
        % use GCV (generalised cross-validation) to terminate
        Term = 'g';
      elseif strcmp(lower(arg), 'bic')
        % use BIC (Bayesian information criterion) to terminate
        Term = 'b';
      elseif strcmp(lower(arg), 'loo')
        % use LOO (leave-one-out cross-validation) to terminate
        Term = 'o';
      else
        % no, the method wasn't specified after all
        method_given = 0;
      end
      if method_given
        % read next arg
        [arg, ii] = getNextArg(options, i);
      else
        fprintf('forwardSelect: terminate with VAR, UEV, FPE, GCV, BIC or LOO\n')
        error('forwardSelect: bad or missing argument for -t option')
      end

      % is the value given?
      th = str2num(arg);
      if ~isempty(th)
        % a value (threshold or maximum age) is specified
        good_value = 1;
        if Term == 'e' & th > 0 & th < 1
          % the value is a good one for threshold methods
          Threshold = th;
        elseif sum(Term == 'ufgbo') == 1 & th >= 1
          % the value is a good one for maximum age methods
          MaxAge = round(th);
        else
          % actually, it's not a good value
          good_value = 0;
        end
        if good_value
          % get ready to advance to next arg
          i = ii;
        else
          fprintf('forwardSelect: acceptable termination values are:\n')
          fprintf('  VAR:                        0 < value < 1\n')
          fprintf('  UEV, FPE, GCV, BIC or LOO:  value >= 1\n')
          error('forwardSelect: bad value for -t option')
        end
      end

    elseif strcmp(arg, '-m')

      % maximum number of regressors allowed in subset - needs a value
      [arg, i] = getNextArg(options, i);
      mr = str2num(arg);
      good_value = 1;
      if ~isempty(mr)
        MaxReg = round(str2num(arg));
        if MaxReg < 1
          good_value = 0;
        end
      else
        % the value argument is mandatory
        good_value = 0;
      end
      if ~good_value
        fprintf('forwardSelect: positive maximum size required\n')
        error('forwardSelect: bad or missing value for -m option')
      end

    elseif strcmp(arg, '-g')

      % global regularisation required
      Global = 1;

      % is the initial value of lambda given?
      [arg, ii] = getNextArg(options, i);
      ll = str2num(arg);
      if ~isempty(ll)
        if ll < 0
          fprintf('forwardSelect: regularisation parameter should be > 0\n')
          error('forwardSelect: bad value for -g option')
        else
          lambda = ll;
          i = ii;
        end
      end

    elseif strcmp(arg, '-r')

      % turn global regularisation on
      Global = 1;

      % is the re-estimation method specified?
      [arg, ii] = getNextArg(options, i);
      method_given = 1;
      if strcmp(lower(arg), 'uev')
        % use UEV (unbiased expected variance) to re-estimate
        ReEst = 'u';
      elseif strcmp(lower(arg), 'fpe')
        % use FPE (final prediction error) to re-estimate
        ReEst = 'f';
      elseif strcmp(lower(arg), 'gcv')
        % use GCV (generalised cross-validation) to re-estimate
        ReEst = 'g';
      elseif strcmp(lower(arg), 'bic')
        % use BIC (Bayesian information criterion) to re-estimate
        ReEst = 'b';
      else
        % no, the method wasn't specified - use the default
        method_given = 0;
        ReEst = 'g';
      end
      if method_given
        % advance to next arg and read it
        i = ii;
        [arg, ii] = getNextArg(options, i);
      end

      % is a value given?
      ll = str2num(arg);
      if ~isempty(ll)
        % an initial value for lambda is specified
        if ll >= 0
          lambda = ll;
          i = ii;
        else
          fprintf('forwardSelect: regularisation parameter should be > 0\n')
          error('forwardSelect: bad value for -r option')
        end
      end

    elseif strcmp(lower(arg), 'ols')

      % turn orthogonal least squares on
      OLS = 1;

    else

      % unrecognised option - point at the offending argument
      fprintf('%s\n', options)
      for k = 1:i-length(arg)-1
        fprintf(' ');
      end
      for k = 1:length(arg)
        fprintf('^');
      end
      fprintf('\n')
      error('forwardSelect: unrecognised option')

    end

    % get next argument
    [arg, i] = getNextArg(options, i);

  end

end

% If global regularisation isn't being used then
% set lambda to zero.
if Global == 0
  lambda = 0;
end

% Initialise.
Continue = 1;
YY = traceProduct(Y', Y);
m = 0;
AgeOfMin = 0;
if Verbose
  fprintf('\nforwardSelect\n')
  fprintf('pass add ')
  if ReEst ~= 'n'
    fprintf(' lambda ')
  end
  if Term == 'e'
    fprintf(' VAR ')
  elseif Term == 'u'
    fprintf(' UEV ')
  elseif Term == 'f'
    fprintf(' FPE ')
  elseif Term == 'g'
    fprintf(' GCV ')
  elseif Term == 'b'
    fprintf(' BIC ')
  else
    fprintf(' LOO ')
  end
  if Flops
    fprintf(' flops ')
    flops(0)
  end
  fprintf('\n')
end

% Search for the most significant regressors.
while Continue

  % increment number of regressors
  m = m + 1;

  if m == 1

    % first regressor - initialise

    % get error change associated with each regressor
    err = rowSum((F' * Y).^2) ./ (lambda + diagProduct(F', F));

    % select the maximum change
    mer = max(err);
    tot = mer / YY;
    can = find(err == mer);
    choose = can(1);
    subset = choose;

    % verbose
    if Verbose
      fprintf('%4d %5d ', m, choose);
    end

    if OLS

      % initialise Ho, the current orthogonalised design matrix,
      % and Hn, which is the same as Ho but with normalised columns,
      % and some other useful quantities
      fj = F(:,choose);
      fjTfj = fj' * fj;
      Ho(:,1) = fj;
      diagHoTHo = fjTfj;
      HoTY = fj' * Y;
      Hn(:,1) = fj / fjTfj;

      % initialise Fm ready for the second iteration
      Fm = F - Hn(:,1) * (Ho(:,1)' * F);

      % initialise the upper triangular matrix Um
      Um = 1;

    else

      % initialise Hm
      fj = F(:,choose);
      Hm = fj;

      % initialise Fm ready for the second iteration
      Fm = F - (fj / (fj' * fj)) * (fj' * F);

    end

  else

    % get error change associated with each regressor
    numerator = rowSum((Fm' * Y).^2);
    denominator = lambda + diagProduct(Fm', Fm);
    denominator(subset) = ones(length(subset),1); % avoid division by zero
    err = numerator ./ denominator;

    % select the maximum change
    mer = max(err);
    tot = tot + mer / YY;
    can = find(err == mer);
    choose = can(1);
    subset = [subset choose];

    % verbose
    if Verbose
      fprintf('%4d %5d ', m, choose);
    end

    if OLS

      % collect the next columns for Ho and Hn and other useful quantities
      fj = Fm(:,choose);
      fjTfj = fj' * fj;
      Ho(:,m) = fj;
      diagHoTHo = [diagHoTHo; fjTfj];
      HoTY = [HoTY; fj' * Y];
      Hn(:,m) = fj / fjTfj;

      % recompute Fm ready for next iteration
      Fm = Fm - Hn(:,m) * (Ho(:,m)' * Fm);

      % update Um
      Um = [Um Hn(:,1:m-1)' * F(:,choose); zeros(1,m-1) 1];

    else

      % update H
      Hm = [Hm F(:,choose)];

      % recompute Fm ready for next iteration
      fj = Fm(:,choose);
      Fm = Fm - (fj / (fj' * fj)) * (fj' * Fm);

    end

  end

  % Re-estimate lambda if required.
  if ReEst ~= 'n'
    if OLS
      YTHoTHoY = diagProduct(HoTY, HoTY');
      ldiagHoTHo = lambda + diagHoTHo;
      l2diagHoTHo = lambda + ldiagHoTHo;
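For orientation, here is a minimal usage sketch; it is not taken from the original toolbox. It assumes the companion helpers getNextArg, rowSum, diagProduct and traceProduct are on the path, that the option string is whitespace-separated, and that the one-dimensional data and Gaussian centre width below are invented for illustration.

% Hypothetical example: every training input doubles as a candidate centre.
p = 50;                                  % number of training points
r = 0.2;                                 % assumed Gaussian radius
x = linspace(0, 1, p)';
y = sin(2*pi*x) + 0.1*randn(p, 1);
F = exp(-(repmat(x, 1, p) - repmat(x', p, 1)).^2 / (2*r^2));  % p-by-M design matrix, M = p

% Verbose run, GCV termination, global regularisation re-estimated by GCV
% from an initial lambda of 0.1.
[subset, H, l, U, A, w] = forwardSelect(F, y, '-v -t gcv -r gcv 0.1');
yhat = H * w;                            % fitted outputs at the training inputs

The other flags visible in the listing work the same way: '-V' adds compute-cost reporting, '-m' caps the number of selected regressors, '-g' turns on a fixed global lambda without re-estimation, and 'ols' switches to the orthogonalised (OLS) update path.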