
📄 dualsparsegeneralfeatures.m (MATLAB) - a machine learning function

function [newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeatures(trainX, trainY, testX, params)
%Compute dual general features, based on the General Feature Extraction
%Framework, from a set of training examples and labels, and return the
%training and test examples projected onto the resulting directions.
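%
%A minimal usage sketch (the field values below are hypothetical examples; the
%actual direction and kernel function names depend on the surrounding toolbox):
%   params.dualFeatureDirection = 'myDualDirection';  %hypothetical function name
%   params.iterations = 10;
%   params.kernelFunctionName = 'myKernel';           %hypothetical function name
%   [newTrainX, newTestX, info] = dualSparseGeneralFeatures(trainX, trainY, testX, params);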

if (nargin ~= 4)
    fprintf('%s\n', help(sprintf('%s', mfilename)));
    error('Incorrect number of inputs - see above usage instructions.');
end

useSparse = issparse(trainX); 

%Store all the parameters 
dualFeatureDirection = char(params.dualFeatureDirection); 
T = params.iterations; 
kernelFunction = char(params.kernelFunctionName); 
kernelParams = params;  %Everything is in the same namespace for now

trainK = feval(kernelFunction, trainX, trainX, kernelParams); 
trainTestK = feval(kernelFunction, trainX, testX, kernelParams); 
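%The kernel function is assumed to have the interface K = kernelFn(X1, X2, params),
%with examples stored as the rows of X, so that trainK is numExamples x numExamples
%and trainTestK is numExamples x numTest (a linear kernel with this interface would
%simply return X1*X2' - see the sketch at the end of this file).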

Kj = trainK; 
Yj = trainY; 

numExamples = size(trainK, 1); 
%Our dual projection directions 
b = ones(numExamples, T); 
tau = ones(numExamples, T); 

alpha = 0.00001; %This number is added to the diagonal of matrices to make them non singular
tol = 10^-100; 

if useSparse
    nonZeros = nnz(trainK);
    KbbK = spalloc(numExamples, numExamples, nonZeros);
    KbbKK = spalloc(numExamples, numExamples, nonZeros);
end 

%Compute the projection directions 
for j=1:T
    fprintf('Iteration %d\n', j); 
    
    b(:, j) = feval(dualFeatureDirection, trainK, Kj, trainY, Yj); 
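    %The direction function is expected to return a column of numExamples dual
    %coefficients given (trainK, Kj, trainY, Yj); the corresponding projection
    %tau_j = Kj*b(:, j) is formed below.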
    
    if useSparse
        Kb = sparse(Kj*b(:, j));
        K2b = sparse(Kj'*Kb); %Note the Kj'
        bK2b = Kb'*Kb;
        KbbK = sparse(Kb*Kb');
        KbbKK = sparse(Kb*K2b');
        projMatrix = KbbK/bK2b;
    else
        Kb = Kj*b(:, j);
        K2b = Kj'*Kb; 
        bK2b = Kb'*Kb;
        KbbK = Kb*Kb';
        KbbKK = Kb*K2b';
        projMatrix = KbbK/bK2b;
    end
    
    tau(:, j) = Kb;
    
    %Sometimes the norm of Kb becomes too small, so there is no point in carrying on
    if abs(bK2b) < tol
        b = b(:, 1:j);
        tau = tau(:, 1:j);
        fprintf('Finished early at projection %d, because norm(tau) is close to zero.\n', j);
        break;
    end
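    %Deflate: remove from Kj and Yj the component along the current dual
    %projection tau_j = Kj*b(:, j), so that subsequent directions are computed
    %on the residual kernel matrix and residual labels.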
   
    Kj = Kj - KbbKK/bK2b;
    Yj = Yj - projMatrix*Yj; 
    
    %cov = trainY'*Kj*trainY/numExamples 
        
    %Clear up the large temporary variables
    clear K2b KbbK KbbKK projMatrix; 
end

clear Kj; 

%Note that we could have stopped early, so update T 
T = size(tau, 2); 
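%Q is T x T, with alpha on its diagonal to guard against singularity;
%diag(diag(tau'*tau)) holds the squared norms of the projections tau_j.
%Z is then a numExamples x T dual map, so trainK*Z and trainTestK'*Z below
%give the T new features for the training and test examples respectively.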
Q = tau'*trainK*b + alpha*eye(T);

%Compute new features on training and test data 
Z = b/((diag(diag(tau'*tau)))\(Q)); 

%The scaling of the final features differs from that of the primal general
%features, but we still have Kj*b = Xj*u. 
newTrainX = trainK*Z;
newTestX = trainTestK'*Z;

subspaceInfo = struct; 
%subspaceInfo.Z = Z; 
subspaceInfo.b = sparse(b); 
%subspaceInfo.tau = tau; 
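
%--------------------------------------------------------------------------
%The two sketches below are illustrative only and are not part of the original
%framework: they show minimal functions matching the interfaces used above.
%In practice such functions would normally live in their own files on the path
%so that feval can resolve them by name.

%A minimal kernel sketch, K = kernelFn(X1, X2, params), assuming examples are
%stored as the rows of X; params is accepted but not used by this simple kernel.
function K = linearKernel(X1, X2, params)
K = X1*X2'; %inner products between the rows of X1 and the rows of X2

%One possible dual direction function, b = dirFn(trainK, Kj, trainY, Yj), in the
%spirit of kernel PLS: take the (deflated) labels themselves as the dual
%direction; this assumes Yj is a single column.
function b = plsStyleDualDirection(trainK, Kj, trainY, Yj)
b = Yj/norm(Yj); %normalised deflated labels, so tau_j = Kj*Yj up to scaling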
