
📄 dualgeneralfeaturesapprox2test.m

📁 a function inside machine learning
%Test dualGeneralFeaturesApprox2 

clear all; 
tol = 10^-5; 

%[X, y, numExamples, numFeatures] = readCsvData('ionosphere.data');
[X, y, numExamples, numFeatures] = readCsvData('sonar-all.data');

X = normalise(X);

d = data; 
d = addDataField(d, 'X', X, 'examples'); 

[trainData, testData] = splitData2(d, 2/3); 
numTrainExamples = getNumDataExamples(trainData); 
numTestExamples = getNumDataExamples(testData); 

%T = rank(X); 
T = 20; 

gKPLSParams.iterations = T; 
gKPLSParams.doubleDeflation = 1; 
gKPLSParams.dualFeatureDirection = 'dualMaxSparseKernelApprox'; 
gKPLSParams.X.kernel = getDefaultLinearKernel; 
gKPLSParams.Y.name = '';  %Ignore Y 
gKPLSParams.normalise = 0;
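%The settings above request T sparse dual feature directions
%('dualMaxSparseKernelApprox') on a linear kernel over X, with Y ignored
%(unsupervised) and no internal normalisation since X was normalised above;
%the effect of doubleDeflation = 1 is internal to dualGeneralFeaturesTrain.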

[subspaceInfo, trainInfo] = dualGeneralFeaturesTrain(trainData, gKPLSParams);
[testInfo, projectionInfo] = dualGeneralFeaturesApprox2(trainData, trainData, trainData, subspaceInfo, gKPLSParams); 
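%Field names used below: subspaceInfo.X carries the extracted directions
%(b, s, tau, normSqTau), testInfo.data carries the reconstructed kernel 'K',
%and projectionInfo.X carries the projected features tauHat1 and tauHat2.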

b = zeros(numTrainExamples, T);
tau = zeros(numTrainExamples, T);
s = zeros(numTrainExamples, T); 
normSqTau = zeros(T, 1); 
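%In the manual loop below, b(:, i) is the i-th sparse dual direction,
%tau(:, i) = Kj*b(:, i) the corresponding dual feature,
%s(:, i) = K'*tau(:, i)/(tau(:, i)'*tau(:, i)), and normSqTau(i) = tau(:, i)'*tau(:, i).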

trainX = getDataFieldValue(trainData, 'X'); 
trainK = trainX*trainX'; 
trainKj = trainK; 
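%trainK is the linear kernel on the training examples; trainKj is the working
%copy that gets deflated after each extracted direction.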

I = eye(numTrainExamples, numTrainExamples); 

%Let's verify by doing things manually 
%Note: if T = rank(X) the last direction seems different 
for i=1:T
    b(:, i) = dualMaxSparseKernelApprox(trainK, trainKj, 0, 0); 
    tau(:, i) = trainKj*b(:, i);  
    s(:, i) = trainK'*tau(:, i)/(tau(:, i)'*tau(:, i));  
    normSqTau(i) = tau(:, i)'*tau(:, i); 
    
    KbbK = tau(:, i)*tau(:, i)';
    KbbKK = KbbK*trainKj/normSqTau(i);
    
    trainKj = trainKj - KbbKK - KbbKK' + KbbKK*KbbK/(normSqTau(i)); 
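    %Assuming trainKj stays symmetric, the update above is the two-sided
    %projection deflation; equivalently (a sketch for reference only):
    %   P = tau(:, i)*tau(:, i)'/normSqTau(i);
    %   trainKj = (I - P)*trainKj*(I - P);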
end

%Values from dualGeneralFeaturesTrain 
b2 = subspaceInfo.X.b; %b is deflated so expect this to be different 
s2 = subspaceInfo.X.s; 
tau2 = subspaceInfo.X.tau; 
normSqTau2 = subspaceInfo.X.normSqTau; 

if norm(s2 - s) > 0.06
    error('Values of s are wrong'); 
end 

if norm(tau2 - tau) > 0.06
    error('Values of tau are wrong'); 
end 

%The approximation to X is TP' = T(T'T)^{-1}T'*X
newTrainX = tau*inv(tau'*tau)*tau'*trainX; 
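%A numerically friendlier equivalent, for reference only:
%   newTrainX = tau*((tau'*tau)\(tau'*trainX));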

if T == rank(X) && norm(newTrainX - trainX) > tol
    error('Approximation to X is incorrect in full rank case'); 
end 

%Hence we have K = TP'PT' = T(T'T)^{-1}T'*X*X'*T(T'T)^{-1}T'
newTrainK = tau2*inv(tau2'*tau2)*tau2'*trainK*tau2*inv(tau2'*tau2)*tau2'; 

if T == rank(X) && norm(newTrainK - trainK) > tol
    error('Approximation to K is incorrect in full rank case'); 
end 

%Compute the same value using s 
newTrainK = tau2*inv(diag(normSqTau2))*tau2'*s2*tau2'; 
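%This matches the expression above because s = K*tau*inv(diag(normSqTau))
%(from the loop, with K symmetric) and, assuming the tau directions are
%mutually orthogonal, tau'*tau = diag(normSqTau).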

if T == rank(X) && norm(newTrainK - trainK) > tol
    error('Approximation to K is incorrect in full rank case using s'); 
end 

%Now let's test that tauHat is correct 
tauHat1 = projectionInfo.X.tauHat1; 
tauHat2 = projectionInfo.X.tauHat2; 

if norm(tauHat1 - tau2) > tol*10
    error('Values of tauHat1 are wrong'); 
end 

if norm(tauHat2 - tau2) > tol*10
    error('Values of tauHat2 are wrong'); 
end 

newTrainK2 = getDataFieldValue(testInfo.data, 'K'); 

if norm(newTrainK2 - newTrainK) > tol
    error('Approximation to K is incorrect when using dualGeneralFeaturesApprox2'); 
end 

%Finally, let's check the projection for a test kernel 
[testInfo, projectionInfo] = dualGeneralFeaturesApprox2(trainData, testData, testData, subspaceInfo, gKPLSParams); 

testX = getDataFieldValue(testData, 'X'); 
testK = testX*testX'; 

[testInfo2, projectionInfo2] = dualGeneralFeaturesProject(trainData, testData, subspaceInfo, gKPLSParams); 
tauHat = getDataFieldValue(testInfo2.data, 'X'); 
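%tauHat, returned by dualGeneralFeaturesProject, plays the role of the
%test-side projected features, so the reconstruction below mirrors the
%training-kernel expression with tauHat replacing tau2 on the outside.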

newTestK = tauHat*inv(tau2'*tau2)*tau2'*trainK*tau2*inv(tau2'*tau2)*tauHat';
newTestK2 = getDataFieldValue(testInfo.data, 'K'); 

if norm(newTestK2 - newTestK) > tol
    error('Approximation to test K is incorrect using dualGeneralFeaturesApprox2'); 
end 
