function output=CCE(train_bags,test_bags,traintarget,num,gamma,cost)
%CCE implements the algorithm described in [1], where Gaussian kernel LibSVM [2] is used as the base learner for transformed feature vectors.
% Syntax
%
% output=CCE(train_bags,test_bags,traintarget,num,gamma,cost)
%
% Description
%
% CCE takes,
% train_bags - An Mx1 cell, where the jth instance of the ith training bag is stored in train_bags{i,1}(j,:)
% test_bags - An Nx1 cell, where the jth instance of the ith test bag is stored in test_bags{i,1}(j,:)
% traintarget - A 1xM array, where traintarget(1,i)=+1 if the ith training bag is positive and -1 otherwise
% num - A 1xD array, where the required number of clusters for the ith individual classifier in the ensemble is stored in num(1,i)
% gamma - The gamma parameter of the Gaussian kernel SVM, default=1
% cost - The C (cost) parameter of the Gaussian kernel SVM, default=1
% and returns,
% output - A 1xN array, where the predicted label of the ith test bag is stored in output(1,i): +1 for positive, -1 for negative
%
% [1]Z.-H. Zhou and M.-L. Zhang. Solving multi-instance problems with classifier ensemble based on constructive clustering. Knowledge and Information
% Systems, 2007, 11(2): 155-170.
% [2]C.-C. Chang and C.-J. Lin. LIBSVM: a library for support vector machines. Technical Report, Department of Computer Science and Information
% Engineering, National Taiwan University, Taipei, China, 2001.
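%
% A minimal, hypothetical usage sketch (it assumes k_means.m and the SVM routines
% RbfSVC/SVMTest called below are on the MATLAB path; the random data only
% illustrate the expected shapes):
%   train_bags = {rand(5,3); rand(7,3)};   % two training bags of 3-d instances
%   test_bags = {rand(4,3); rand(6,3)};    % two test bags
%   traintarget = [+1,-1];                 % one label per training bag
%   num = [2,3,4];                         % an ensemble of three classifiers
%   output = CCE(train_bags,test_bags,traintarget,num,1,1);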
if(nargin<4)
error('Not enough input parameters, please check again.');
end
if(nargin<6)
cost=1;
end
if(nargin<5)
gamma=1;
end
[num_train,tempvalue]=size(train_bags);
[num_test,tempvalue]=size(test_bags);
[tempvalue,en_size]=size(num);
Outputs=zeros(num_test,en_size);
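% Pool the instances of all training bags into a single matrix; constructive
% clustering operates at the instance level, ignoring bag boundaries.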
instances=[];
for i=1:num_train
instances=[instances;train_bags{i,1}];
end
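% Build one individual classifier for each requested number of clusters in num.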
for iter=1:en_size
disp(['Building the ',num2str(iter),'-th individual classifier']);
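% Cluster the pooled training instances into num(1,iter) groups; each cluster
% defines one binary feature of the transformed bag-level representation.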
centers=k_means(instances,num(1,iter));
trainset=zeros(num(1,iter),num_train);
testset=zeros(num(1,iter),num_test);
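% Re-represent each training bag: for every cluster, count how many of the bag's
% instances fall closest to that cluster's center.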
for i=1:num_train
tempbag=train_bags{i,1};
[tempsize,tempvalue]=size(tempbag);
for j=1:tempsize
tempdist=zeros(1,num(1,iter));
for k=1:num(1,iter)
% squared Euclidean distance to the kth cluster center (k_means is assumed to return one center per row)
tempdist(1,k)=(tempbag(j,:)-centers(k,:))*(tempbag(j,:)-centers(k,:))';
end
[tempmin,index]=min(tempdist);
trainset(index,i)=trainset(index,i)+1;
end
end
% Binarize the counts: the kth feature is 1 if the bag has at least one instance in cluster k
trainset=double(trainset>=1);
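% Re-represent each test bag with the same cluster-based binary features.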
for i=1:num_test
tempbag=test_bags{i,1};
[tempsize,tempvalue]=size(tempbag);
for j=1:tempsize
tempdist=zeros(1,num(1,iter));
for k=1:num(1,iter)
% squared Euclidean distance to the kth cluster center (one center per row assumed)
tempdist(1,k)=(tempbag(j,:)-centers(k,:))*(tempbag(j,:)-centers(k,:))';
end
[tempmin,index]=min(tempdist);
testset(index,i)=testset(index,i)+1;
end
end
testset=double(testset>=1);
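% Train a Gaussian-kernel SVM on the transformed training bags and classify the
% transformed test bags. RbfSVC/SVMTest appear to be the OSU-SVM wrappers around
% LIBSVM [2]; the all-ones Labels passed to SVMTest are placeholders since the
% true test labels are unknown, and only PreLabels is used.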
[AlphaY, SVs, Bias, Parameters, nSV, nLabel] = RbfSVC(trainset,traintarget,gamma,cost);
Labels=ones(1,num_test);
[ClassRate, DecisionValue, Ns, ConfMatrix, PreLabels]= SVMTest(testset, Labels, AlphaY, SVs, Bias,Parameters, nSV, nLabel);
Outputs(:,iter)=PreLabels';
end
% Majority voting over the ensemble; ties are resolved in favour of the positive class
output=((sum(Outputs,2)'>=0)-0.5)*2;