% rbf_mip.m
function Outputs=RBF_MIP(training_bags,test_bags,train_target,num_cluster,mu,bag_dist_measure,cluster_dist_measure)
%RBF_MIP Adapting RBF neural networks for multi-instance learning.
%   For details of the RBF-MIP algorithm, please refer to reference [1].
%
% Syntax
%
%    Outputs=RBF_MIP(training_bags,test_bags,train_target,num_cluster,mu,bag_dist_measure,cluster_dist_measure)
%
% Description
%
%    RBF_MIP takes,
%        training_bags        - An Mx1 cell, where the ith training bag is stored in training_bags{i,1}
%        test_bags            - An Nx1 cell, where the ith test bag is stored in test_bags{i,1}
%        train_target         - A QxM array; if the ith training bag belongs to the jth class,
%                               then train_target(j,i) equals +1, otherwise it equals -1
%        num_cluster          - The number of clusters remaining in the first layer
%        mu                   - The mu parameter used to determine sigma
%        bag_dist_measure     - Mode for measuring the distance between bags:
%                               1 for minimum Hausdorff distance; 2 for maximum Hausdorff distance
%        cluster_dist_measure - Mode for measuring the distance between clusters:
%                               1 for minimum Hausdorff distance; 2 for maximum Hausdorff distance
%    and returns,
%        Outputs              - A QxN array, where the output of the ith test bag on the
%                               jth class is stored in Outputs(j,i)
%
% [1] M.-L. Zhang and Z.-H. Zhou. Adapting RBF neural networks for multi-instance
%     learning. Neural Processing Letters, 2006, 23(1): 1-26.

[num_class,num_training]=size(train_target);
[num_test,tempvalue]=size(test_bags); %#ok<NASGU>  test_bags is Nx1, so rows = N

% Start from the trivial clustering: every training bag is its own cluster.
clustering=cell(num_training,1);
for i=1:num_training
    clustering{i,1}=i;
end

% Pairwise bag-level Hausdorff distance matrix (symmetric, zero diagonal);
% only the upper triangle is computed, then mirrored.
distance_matrix=zeros(num_training,num_training);
for bags1=1:(num_training-1)
    if(mod(bags1,100)==0)
        disp(strcat('Computing distance for bags:',num2str(bags1)));
    end
    for bags2=(bags1+1):num_training
        if(bag_dist_measure==1)
            distance_matrix(bags1,bags2)=minHausdorff(training_bags{bags1,1},training_bags{bags2,1});
        else
            distance_matrix(bags1,bags2)=maxHausdorff(training_bags{bags1,1},training_bags{bags2,1});
        end
    end
end
distance_matrix=distance_matrix+distance_matrix';

% Lower triangle (incl. diagonal) is flooded with realmax so the merging
% step only ever considers each unordered cluster pair once.
cluster_distance=tril(realmax*ones(num_training,num_training))+triu(distance_matrix,1);

% Agglomerative clustering down to num_cluster clusters (first layer).
[clustering,cluster_distance]=base_opti(training_bags,clustering,num_cluster,distance_matrix,cluster_distance,cluster_dist_measure);

% Hidden-layer activation matrix for the training bags; column 1 is the bias.
matrix_fai=zeros(num_training,num_cluster+1);
for i=1:num_training
    matrix_fai(i,1)=1;
    for j=1:num_cluster
        tempsize=size(clustering{j,1});
        num_bags=tempsize(2);
        temp_dist=zeros(1,num_bags);
        for k=1:num_bags
            temp_dist(1,k)=distance_matrix(i,clustering{j,1}(1,k));
        end
        if (cluster_dist_measure==1)
            % Minimal Hausdorff distance between {bag i} and cluster j:
            % the smallest bag-to-bag distance.
            matrix_fai(i,j+1)=min(temp_dist);
        else
            % Maximal Hausdorff distance between {bag i} and cluster j.
            % For a singleton set versus a cluster this reduces to the
            % largest bag-to-bag distance. (BUGFIX: the original code took
            % max(min(temp_dist)) twice on a row vector, which collapses to
            % min(temp_dist) and silently duplicated the minimal case.)
            matrix_fai(i,j+1)=max(temp_dist);
        end
    end
end

% sigma = mu * (average pairwise distance between the retained clusters).
average_dist=0;
for i=1:(num_cluster-1)
    for j=(i+1):num_cluster
        average_dist=average_dist+cluster_distance(i,j);
    end
end
average_dist=(average_dist*2)/(num_cluster*(num_cluster-1));
sigma=mu*average_dist;

% Gaussian kernel activations for the training bags.
for i=1:num_training
    for j=1:num_cluster
        matrix_fai(i,j+1)=exp((-matrix_fai(i,j+1)^2)/(2*sigma^2));
    end
end

% Hidden-layer activations for the test bags (bias term in the first row).
test_data=zeros((num_cluster+1),num_test);
for i=1:num_test
    activation=ones(num_cluster+1,1);           % renamed from 'input' (shadows built-in)
    temp_cell=cell(1,1);
    temp_cell{1,1}=test_bags{i,1};
    for j=1:num_cluster
        kernel_j=cell(0,1);
        tempsize=size(clustering{j,1});
        for k=1:tempsize(2)
            kernel_j{k,1}=training_bags{clustering{j,1}(1,k),1};
        end
        activation(j+1,1)=cluster_dist(temp_cell,kernel_j,1,tempsize(2),bag_dist_measure,cluster_dist_measure);
    end
    for m=1:num_cluster
        activation(m+1,1)=exp((-activation(m+1,1)^2)/(2*sigma^2));
    end
    test_data(:,i)=activation;
end

% Output weights per class via the pseudo-inverse of matrix_fai'*matrix_fai,
% computed through an SVD with singular values <= threshold truncated to
% zero for numerical stability.
Outputs=zeros(num_class,num_test);
Lefthand=matrix_fai'*matrix_fai;
[u,s,v]=svd(Lefthand);
threshold=1e-5;
inv_s=zeros(num_cluster+1,num_cluster+1);       % renamed from 'diag' (shadows built-in)
for i=1:(num_cluster+1)
    if (s(i,i)>threshold)
        inv_s(i,i)=1/s(i,i);
    end
end
for i=1:num_class
    target=(train_target(i,:)==1)';             % 0/1 target vector for class i
    Righthand=matrix_fai'*target;
    Weights=(v*inv_s*(u'*Righthand))';
    for j=1:num_test
        Outputs(i,j)=Weights*test_data(:,j);
    end
end
% (removed: non-code webpage residue — keyboard-shortcut help text pasted in
%  from the code-sharing site this file was extracted from)