% cica.m
function [y,W,CPUtime,step]=cica(X,gaussian_type,ref_sig,norm)
% CICA - Constrained Independent Component Analysis.
% Estimates ONE independent component of the mixed signals X, guided by a
% reference signal used as a closeness constraint.
%
% [y,W,CPUtime,step] = cica(X,gaussian_type,ref_sig,norm)
%
% Inputs:
%   X             Mixed signals (n-by-m matrix); each row is one observed
%                 signal of m samples.
%   gaussian_type Distribution type of the expected independent component:
%                 'sup' (super-Gaussian) or 'sub' (sub-Gaussian).
%   ref_sig       Reference signal, a row vector of m elements.
%   norm          Closeness measure between y and ref_sig:
%                 'mse'  Epsilon = E((y-ref_sig)^2)
%                 'co1'  Epsilon = -E(y*ref_sig)
%                 'co2'  Epsilon = 1/(E(y*ref_sig)^2)
%
% Outputs:
%   y        Extracted independent component (row vector of m samples).
%   W        Separating column vector in the PCA-reduced space.
%   CPUtime  CPU seconds consumed by the estimation.
%   step     Number of iterations performed.
%
% NOTE(review): the input argument 'norm' shadows MATLAB's builtin norm()
% inside this function; the name is kept for backward compatibility.

n=size(X,1);
m=size(X,2);
% Standardize the reference signal to zero mean / unit variance.
ref_sig=(ref_sig-mean(ref_sig))/std(ref_sig);

% PCA dimension reduction: eig returns eigenvalues in ascending order, so
% drop the leading (smallest) eigen-directions until the discarded
% cumulative energy reaches 1% of the total.
[V,D]=eig(X*X'/m);
eigenvalue=diag(D);
t1=eigenvalue(1)/sum(eigenvalue);
k1=1;
while t1<0.01
    k1=k1+1;
    t1=t1+eigenvalue(k1)/sum(eigenvalue);
end
V=V(:,k1:n);
X=V'*X;
n=n-k1+1;               % dimensionality after the PCA reduction

W=(rand(n,1)-0.5)*0.5;  % random initial separating vector
R=X*X'/m;               % covariance of the reduced data
R_contrary=R^-1;        % its inverse, used as a Newton-like scaling

% Learning parameters.
l_rate1=1;      % learning rate for Mu     (closeness multiplier)
l_rate2=1;      % learning rate for W
l_rate3=1;      % learning rate for Lambda (unit-variance multiplier)
No_step=1000;   % maximum number of iterations

% Constraint threshold matched to the chosen closeness measure.
% BUG FIX: was compared with elementwise ==, which errors for strings of a
% different length and silently fell through on an unknown value.
if strcmp(norm,'mse')
    threshold=1;
elseif strcmp(norm,'co1')
    threshold=-0.5;
elseif strcmp(norm,'co2')
    threshold=4;
else
    error('cica:badNorm','Unknown closeness measure ''%s''.',norm);
end

p=5;    % positive constant weighting the contrast function
a=1;    % positive constant for the super-Gaussian contrast
b=1;    % positive constant for the sub-Gaussian contrast

Mu=0;       % Lagrange multiplier for the closeness constraint
Lambda=0;   % Lagrange multiplier for the unit-variance constraint
cputime_start=cputime;

if strcmp(gaussian_type,'sup')
    flag=1;
    kk=1;
    while flag
        y=W'*X;
        Lambda=Lambda+l_rate3*((mean(y.^2)-1)^2);
        if strcmp(norm,'mse')
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*(mean((y_normalized-ref_sig).^2)-threshold)); % mse
            a1=(-2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*X*(tanh(a*y)-a*y)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m+2*Mu*X*(y-ref_sig)'/m;
            d=Mu*2+8*Lambda-(2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*a*mean((sech(a*y)).^2-1);
            W=W-l_rate2*R_contrary*a1./d;
        elseif strcmp(norm,'co1')
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*(-y_normalized*ref_sig'/m-threshold)); % correlation1
            a1=(-2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*X*(tanh(a*y)-a*y)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m-Mu*X*ref_sig'/m;
            d=8*Lambda-(2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*a*mean((sech(a*y)).^2-1);
            W=W-l_rate2*R_contrary*a1./d;
        else % 'co2'
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*((y_normalized*ref_sig'/m).^-2-threshold)); % correlation2
            a1=(-2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*X*(tanh(a*y)-a*y)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m-4*((y_normalized*ref_sig'/m).^-3)*Mu*X*ref_sig'/m;
            dd=(8*Lambda-(2*p*(mean(log(cosh(a*y))/a-a*y.^2/2)+0.1254))*a*mean((sech(a*y)).^2-1))*R+6*Mu*((y_normalized*ref_sig'/m).^-4)*((X*ref_sig'/m)*(X*ref_sig'/m)');
            W=W-l_rate2*(dd^-1)*a1;
        end
        tt(kk)=sum(abs(W).^2)^(1/2);    % norm of W, tracked for convergence
        tr(kk)=W'*X*ref_sig'/m;         % correlation with the reference
        if kk>10 % require a minimum number of steps before testing convergence
            % BUG FIX: abs() was applied to the logical comparison result
            % instead of to the difference itself.
            if kk>No_step || (abs(tt(kk)-tt(kk-1))<0.0001 && abs(tr(kk)-tr(kk-1))<0.0001)
                flag=0;
            end
        end
        kk=kk+1;
    end
elseif strcmp(gaussian_type,'sub')
    flag=1;
    kk=1;
    while flag
        y=W'*X;
        Lambda=Lambda+l_rate3*((mean(y.^2)-1)^2);
        if strcmp(norm,'mse')
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*(mean((y_normalized-ref_sig).^2)-threshold));
            a1=-2*p*(mean(b*y.^4/4-0.75))*X*(b*y.^3)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m+2*Mu*X*(y-ref_sig)'/m;
            d=Mu*2+8*Lambda-2*p*(mean(b*y.^4/4-0.75))*mean(3*b*y.^2);
            W=W-l_rate2*R_contrary*a1./d;
        elseif strcmp(norm,'co1')
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*(-y_normalized*ref_sig'/m-threshold));
            a1=-2*p*(mean(b*y.^4/4-0.75))*X*(b*y.^3)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m-Mu*X*ref_sig'/m;
            d=8*Lambda-2*p*(mean(b*y.^4/4-0.75))*mean(3*b*y.^2);
            W=W-l_rate2*R_contrary*a1./d;
        else % 'co2'
            y_normalized=(y-mean(y))/std(y);
            Mu=Mu+max(-Mu,l_rate1*((y_normalized*ref_sig'/m).^-2-threshold));
            a1=-2*p*(mean(b*y.^4/4-0.75))*X*(b*y.^3)'/m+4*Lambda*(mean(y.^2)-1)*(X*y')/m-4*((y_normalized*ref_sig'/m).^-3)*Mu*X*ref_sig'/m;
            dd=(8*Lambda-2*p*(mean(b*y.^4/4-0.75))*mean(3*b*y.^2))*R+6*Mu*((y_normalized*ref_sig'/m).^-4)*((X*ref_sig'/m)*(X*ref_sig'/m)');
            W=W-l_rate2*(dd^-1)*a1;
        end
        if sum(abs(W))>500 || sum(abs(y))>10000
            % Restart from a fresh random point when the iteration diverges.
            % BUG FIX: the original set kk=0 and then indexed tt(kk), which
            % is an invalid (zero) subscript in MATLAB.
            W=(rand(n,1)-0.5);
            kk=1;
            Mu=0;
            Lambda=0;
            continue;
        end
        tt(kk)=sum(abs(W).^2)^(1/2);
        tr(kk)=W'*X*ref_sig'/m;
        if kk>10 % require a minimum number of steps before testing convergence
            % BUG FIX: abs() was applied to the logical comparison result
            % instead of to the difference itself.
            if kk>No_step || (abs(tt(kk)-tt(kk-1))<0.0001 && abs(tr(kk)-tr(kk-1))<0.0001)
                flag=0;
            end
        end
        kk=kk+1;
    end
else
    error('cica:badType','gaussian_type must be ''sup'' or ''sub'', got ''%s''.',gaussian_type);
end

CPUtime=cputime-cputime_start;  % returned (no longer echoed to console)
y=W'*X;
figure(1),plot(y);              % preserved side effect: plot the recovered source
step=kk-1;
% (non-code web-page residue removed: keyboard-shortcut help text)