function [W, errvals]=gha(X, k, varargin)
% GHA Generalized Hebbian Algorithm for PCA
%
% [W, errvals] = gha(X, k, options)
% X is the set of input vectors. Each column of X is one sample.
% k is the number of principal components to extract.
% W is the matrix of principal components.
% errvals is the running average of the squared reconstruction
% error, with one entry per iteration
%
% options (specified by key/value pairs)
% 'rate' = 0.1 the learning rate
% 'W' = 0.1*randn(size(X,1),k) the initial component matrix
% 'niter' = 1 the number of passes over the data
% 'annealfunc' = @(iter) 1 annealing function of the current
% iteration; its value scales the learning rate
% 'printerr' = 0 boolean flag to print the error at each iteration
%
% Notes:
% - GHA extracts the components in reverse order. That is, the last
% component is the first principal component.
%
% David Gleich
% CS 152 - Neural Networks
% 12 December 2003
%
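% Example (a minimal sketch; the data, component count, and annealing
% schedule below are illustrative assumptions, not from the original):
%
%   X = diag([5 4 3 2 1])*randn(5, 500);  % 5-d samples, decaying variance
%   [W, errvals] = gha(X, 2, 'rate', 0.01, 'niter', 10, ...
%       'annealfunc', @(iter) 1/iter);
%   W = fliplr(W);            % undo the reverse ordering (see Notes)
%   [V, D] = eig(cov(X'));    % W(:,1) should approximate the dominant
%                             % eigenvector of the sample covariance
%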
% get the dimensionality
[m, n] = size(X);
% random initial weights
W = 0.1*randn(m,k);
% default options; getopt is an external helper that merges the
% key/value pairs in varargin into this struct (a minimal sketch is
% provided as a subfunction at the end of this file)
options = struct(...
    'rate', 0.1, ...
    'W', W, ...
    'niter', 1, ...
    'annealfunc', @(iter) 1, ...
    'printerr', 0);
options = getopt(options, varargin);
% validate options: fall back to the random weights if the supplied
% initial matrix has the wrong shape in either dimension
if (size(options.W,1) ~= size(W,1) || size(options.W,2) ~= size(W,2))
    warning('Invalid initial set of weights, using random.');
    options.W = W;
end
W = options.W;
success = 0;           % set nonzero if the update diverges
beta = options.rate;   % current learning rate (annealed below)
err = 0;               % accumulated squared reconstruction error
iter = 1;              % global iteration counter across all passes
errvals = zeros(n*options.niter, 1);
for niter = 1:options.niter
    for ii = 1:n
        x = X(:,ii);
        y = W'*x;
        % Sanger's rule: the tril term deflates column j only against
        % components j..k, which is why the components appear in
        % reverse order (see Notes above)
        W = W + beta*(x*y' - W*tril(y*y'));
        beta = options.rate*options.annealfunc(iter);
        % accumulate the squared reconstruction error and record its
        % running average
        err = err + sum((x - W*(W'*x)).^2);
        errvals(iter) = err/iter;
        if (any(~isfinite(W(:))))
            warning('Lost convergence at iteration %i; lower learning rate?', iter);
            success = 1;
            break;
        end
        if (options.printerr == 1)
            fprintf('Error = %g; Iteration = %i\n', err/iter, iter);
        end
        iter = iter+1;
    end
    if (success > 0)
        break;
    end
end
% trim the unused (or non-finite) tail if the loop stopped early
errvals = errvals(1:iter-1);
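
% -----------------------------------------------------------------------
% Minimal sketch of the getopt helper assumed above. The original ships
% as a separate utility; this stand-in (its warning text is an
% assumption) simply overwrites the default fields with any key/value
% pairs supplied by the caller.
function options = getopt(options, pairs)
for jj = 1:2:length(pairs)-1
    key = pairs{jj};
    if (isfield(options, key))
        options.(key) = pairs{jj+1};
    else
        warning('Ignoring unknown option ''%s''.', key);
    end
end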