% ica.m -- ICA/BSS demo listing: driver (bsep), core algorithms (sepcore),
% and small utilities.  Each function notes the file it should be saved as.
% *********************************************************************
% Driver ICA/BSS function, save as bsep.m
%   ep is the number of epochs to train
%   p  is the batch size
%   h  is the learning rate
%   m  selects the separation method (0..2, passed to sepcore; the
%      original comment called this "momentum", but the code uses it
%      as the method switch)
% *********************************************************************
function [out, w, b] = bsep( mix, ep, p, h, m);
% BSEP  Driver for ICA/BSS source separation.
%   [out, w, b] = bsep( mix, ep, p, h, m)
%   mix : mixed signals, one signal per row
%   ep  : number of training epochs
%   p   : batch size
%   h   : learning rate
%   m   : method selector forwarded to sepcore (default 0, Bell & Sejnowski)
%   out : separated signals, w : learned unmixing matrix, b : bias vector
% default method when the caller omits it
if nargin == 4
m = 0;
end
% train on a randomly permuted time index so batches are not dominated
% by local signal structure
i = randperm( cols( mix));
% prewhiten data (ensures zero mean and second-order independence)
[wmix, v, mn] = prewhite( mix(:,i));
% initial unmixing matrix (identity) and zero biases
w = eye( rows( mix));
b = zeros( 1, rows( mix));
% find unmixing matrix on the whitened data
[w, b] = sepcore( wmix, w, b, ep, p, h, m);
% separate: w was learned on whitened input, so it must be applied to
% whitened input.  The original code applied w to the raw mix, which
% drops the centering and sphering and degrades the separation.  Mean
% and covariance are invariant to the column permutation, so v and mn
% are valid for the unpermuted data as well.
out = w * v * ( mix - mn(:) * ones( 1, cols( mix)));
% *********************************************************************
% Core ICA/BSS function, save as sepcore.m
% *********************************************************************
function [w, b] = sepcore( in, w, b, ep, p, h, mth)
% SEPCORE  Core ICA/BSS stochastic-gradient learning loop.
%   in  : prewhitened mixed signals, one per row
%   w   : initial unmixing matrix; updated in place and returned
%   b   : initial bias row vector; only used and updated by method 0
%   ep  : number of epochs
%   p   : batch size
%   h   : learning rate
%   mth : method -- 0 Bell & Sejnowski (default), 1 Amari/Cichocki/Young,
%         2 Cichocki/Kasprzak/Amari (global rule)
% set default method to Bell & Sejnowski
if nargin == 6
mth = 0;
end
% number of whole batches per epoch.  NOTE(review): length() returns the
% LARGEST dimension of a matrix, so this assumes more samples (columns)
% than channels (rows) -- confirm for short inputs.
mm = floor(length(in)/p);
% time the training loop (tic here, matching toc at the end)
tic
% Bell & Sejnowski method
%
% Working command: bsep( smix, 2, 20, .05, 0);
%
% Comments:
% As described in: "An Information-Maximization Approach
% to Blind Separation and Blind Deconvolution",
% Neural Computation, 7, 1995.
% Works well; whether the biases are worth keeping is debatable,
% but results are good with them in.
%
% The commented-out delta rule is Bell's original; the one right under
% it is Bell's rule after Amari's natural-gradient suggestion.
im = eye( rows( in));
if mth == 0
on = ones( 1, p);
for i = 1:ep
for k = 1:p:mm*p
% one batch of p consecutive (pre-permuted) samples
x = in( :, k:k+p-1);
u = w * x;
% nonlinearity with the per-channel bias replicated across the batch
y = ptanh( u + b' * on);
%dw = p * inv( w') - (2 * y * x');
% natural-gradient form of the infomax weight update
dw = p * ( im - 2 * y * u') * w;
% bias update: per-channel sums over the batch (row vector)
db = sum( -2 * y');
w = w + h * dw;
b = b + h * db;
end
end
% Amari, Cichocki & Young method
%
% Working command: bsep( smix, 2, 20, .05, 1);
%
% Comments:
% As described in "A New Learning Algorithm for
% Blind Signal Separation", NIPS 8, 1996.
% Works pretty well.  fy can be any monotonic function: the large
% polynomial from the paper (commented out) underperforms here, the
% identity also works, and tanh is good but about twice as slow.
elseif mth == 1
im = eye( rows( in));
for i = 1:ep
for k = 1:p:mm*p
y = w * in( :, k:k+p-1);
% fy = .75*(y.^11)+6.23*(y.^9)-4.6667*(y.^7)...
% -11.75*(y.^5)+14.5*(y.^3);
fy = ptanh( y);
% natural-gradient update, no bias term in this rule
dw = (im - fy * y') * w;
w = w + h * dw;
end
end
% Cichocki, Kasprzak & Amari method
%
% Working command: bsep( 100*smix, 2, 20, .000000001, 2);
%
% Comments:
% As described in: "Local Adaptive Learning Algorithms
% for Blind Separation of Natural Images" (global rule).
%
% Only the global learning rule is implemented here; the local variant
% is not.  NOTE(review): this branch multiplies the update by w' where
% the other branches use w -- confirm against the paper before changing.
elseif mth == 2
im = eye( rows( in));
% (dead assignment: y is overwritten on the first loop iteration)
y = in;
for i = 1:ep
for k = 1:p:mm*p
y = w * in( :, k:k+p-1);
% cubic / identity activation pair
gy = y.^3;
fy = y;
dw = (im - fy * gy') * w';
w = w + h * dw;
end
end
end
% report elapsed training time
toc
% *********************************************************************
% Misc utilities
% *********************************************************************
function r = rows( w) % save as rows.m
% ROWS  Number of rows of matrix w.
% Uses size with an explicit dimension instead of discarding an
% unused second output.
r = size( w, 1);
% -----------
function c = cols( w) % save as cols.m
% COLS  Number of columns of matrix w.
% Uses size with an explicit dimension instead of discarding an
% unused first output.
c = size( w, 2);
% -----------
function out = ptanh( x) % save as ptanh.m
% PTANH  Elementwise hyperbolic tangent via the identity
%   tanh(x) = 1 - 2/(exp(2x) + 1).
% Overflow of exp(2*x) for large positive x yields Inf, so the
% expression correctly saturates to 1; for large negative x the
% exponential underflows to 0 and the result saturates to -1.
% NOTE(review): claimed faster than the builtin tanh -- benchmark to
% confirm on a current MATLAB before relying on that.
out = 1 - 2 ./ (exp( 2*x) + 1);
% -----------
function [out, v, m] = prewhite( in) % save as prewhite.m
% PREWHITE  Remove each row's mean and decorrelate the rows.
%   in  : signals, one per row
%   out : whitened signals (zero mean per row, decorrelated)
%   v   : sphering matrix applied after mean removal
%   m   : row means of the input (row vector)
% De-bias: subtract each row's mean.  Vectorized replacement for the
% original per-row loop; identical result, and no dependency on the
% rows() helper.
m = mean( in');
out = in - m' * ones( 1, size( in, 2));
% Decorrelate: sphere with the inverse matrix square root of the
% (unnormalized) covariance.  NOTE(review): the leading factor 2 scales
% the output so that out*out' = 4*I rather than I; kept as in the
% original since downstream learning rates were tuned to it.
v = 2 * sqrtm( inv( out * out'));
out = v * out;