% dualgeneralfeaturestest.m
% A script to test dualGeneralFeatures against manual KPLS/KPCA computations
clear all;
numExamples = 150;
numFeatures = 10;
noise = 0.5;
tol = 10^-2;
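%Generate a random linear target with additive noise from centred, normalised inputs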
X = rand(numExamples, numFeatures);
X = centerData(X);
X = normalise(X);
q = rand(numFeatures, 1);
Y = centerData(X*q + noise*(rand(numExamples, 1)-0.5));
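%Put the data into a data object and split it into training (2/3) and test (1/3) sets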
d = data;
d = addDataField(d, 'X', X, 'examples');
d = addDataField(d, 'Y', Y, 'labels');
[trainData, testData] = splitData2(d, 2/3);
[numTrainExamples, numFeatures] = getDataFieldSize(trainData, 'X');
[numTestExamples, numFeatures] = getDataFieldSize(testData, 'X');
T = min(8, rank(getDataFieldValue(trainData, 'X')));
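%Extract T features with dualGeneralFeatures using the dual maximum covariance
%direction (kernel PLS) and a linear kernel, then project the test data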
params.dualFeatureDirection = 'dualMaxCovariance';
params.iterations = T;
params.X.kernel = getDefaultLinearKernel;
[subspaceInfo, trainInfo] = dualGeneralFeaturesTrain(trainData, params);
[testInfo, projectionInfo] = dualGeneralFeaturesProject(trainData, testData, subspaceInfo, params);
%Now, do KPLS manually
K = getDataFieldValue(trainData, 'X')*getDataFieldValue(trainData, 'X')';
Kj = K;
Y = getDataFieldValue(trainData, 'Y');
Yj = Y;
tau = zeros(numTrainExamples, T);
b = zeros(numTrainExamples, T);
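%At each iteration: find the dual direction b maximising the covariance
%criterion, form the score tau = Kj*b, then deflate Kj and Yj by projecting out tau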
for i=1:T
b(:, i) = dualMaxCovariance(K, Kj, Y, Yj);
tau(:, i) = Kj*b(:, i);
Kj = Kj - (tau(:, i)*tau(:, i)')*Kj/(tau(:, i)'*tau(:, i));
Yj = Yj - (tau(:, i)*tau(:, i)')*Yj/(tau(:, i)'*tau(:, i));
end
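%Combine the dual directions into a projection matrix Q; alpha*eye(T) is a small
%ridge term for numerical stability. (Equivalently, without explicit inverses:
%Q = b/((tau'*tau)\(tau'*K*b) + I).)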
alpha = 10^-5;
I = alpha*eye(T);
Q = b*inv(inv(tau'*tau)*tau'*K*b + I);
newTrainX = K*Q;
newTestX = getDataFieldValue(testData, 'X')*getDataFieldValue(trainData, 'X')'*Q;
[newTrainX2, newTestX2] = normalise(newTrainX, newTestX);
%The next test checks if b and subspaceInfo.X.b are equivalent, ignoring
%differences in direction
if norm(abs(b\subspaceInfo.X.b) - eye(T)) > tol
error('Dual projection directions are wrong');
end
if norm(abs(newTrainX2\getDataFieldValue(trainInfo.data, 'X')) - eye(T)) > tol
error('Extracted features on training set are wrong');
end
if norm(abs(newTestX2\getDataFieldValue(testInfo.data, 'X')) - eye(T)) > tol
error('Extracted features on test set are wrong');
end
%Now test the legacy code
dualFeatureDirection = 'dualMaxCovariance';
trainTestK = getDataFieldValue(trainData, 'X')*getDataFieldValue(testData, 'X')';
[newTrainX3, newTestX3, subspaceInfo2] = dualGeneralFeatures(getDataFieldValue(trainData, 'X'), Y, getDataFieldValue(testData, 'X'), params);
if norm(abs(newTrainX2\newTrainX3) - eye(T)) > tol
error('Extracted features on training set are wrong in legacy code');
end
if norm(abs(newTestX2\newTestX3) - eye(T)) > tol
error('Extracted features on test set are wrong in legacy code');
end
%Now test double deflation with max variance to check that it gives the same
%features as single deflation (it should), and then compare against KPCA
params.dualFeatureDirection = 'dualMaxVariance';
params.doubleDeflation = 1;
[subspaceInfo, trainInfo] = dualGeneralFeaturesTrain(trainData, params);
params.doubleDeflation = 0;
[subspaceInfo2, trainInfo2] = dualGeneralFeaturesTrain(trainData, params);
if norm(abs(getDataFieldValue(trainInfo.data, 'X')\getDataFieldValue(trainInfo2.data, 'X')) - eye(T)) > tol
error('Max variance features should be the same with single and double deflation');
end
%Test if max variance is the same as KPCA
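%(normalisation is switched off so the features can be compared directly with
%those produced by dualPCATrain)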
params.normalise = 0;
[subspaceInfo2, trainInfo2] = dualGeneralFeaturesTrain(trainData, params);
KPCAParams.iterations = T;
KPCAParams.X.kernel = getDefaultLinearKernel;
[subspaceInfo3, trainInfo3] = dualPCATrain(trainData, KPCAParams);
if norm(abs(getDataFieldValue(trainInfo2.data, 'X')\getDataFieldValue(trainInfo3.data, 'X')) - eye(T)) > tol
error('Max variance features should be same as KPCA ones');
end
%Check dual deflation against doing it manually
params.dualFeatureDirection = 'dualMaxVariance';
params.doubleDeflation = 1;
params.normalise = 0;
[subspaceInfo, trainInfo] = dualGeneralFeaturesTrain(trainData, params);
trainX = getDataFieldValue(trainData, 'X');
trainK = trainX*trainX';
trainKj = trainK;
numTrainExamples = size(trainX, 1);
b = zeros(numTrainExamples, T);
tau = zeros(numTrainExamples, T);
I = eye(numTrainExamples);
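%Double deflation: the kernel is deflated on both sides each iteration,
%trainKj <- (I - P)*trainKj*(I - P) with P = tau*tau'/(tau'*tau)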
for i=1:T
b(:, i) = dualMaxVariance(trainK, trainKj, 0, 0);
tau(:, i) = trainKj*b(:, i);
trainKj = (I - tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i))) * trainKj * (I - tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i)));
end
if norm(abs(tau\getDataFieldValue(trainInfo.data, 'X')) - eye(T)) > tol
error('For dual deflated general features, tau is incorrect');
end
if norm(abs(b\subspaceInfo.X.b) - eye(T)) > tol
error('For dual deflated general features, dual projection directions are incorrect');
end
%Now check that the test projections using dual deflation are correct
params.dualFeatureDirection = 'dualMaxSparseVariance';
params.doubleDeflation = 1;
params.normalise = 0;
[subspaceInfo, trainInfo] = dualGeneralFeaturesTrain(trainData, params);
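%Projecting the training data itself should reproduce the training features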
[testInfo, projectionInfo] = dualGeneralFeaturesProject(trainData, trainData, subspaceInfo, params);
if norm(getDataFieldValue(trainInfo.data, 'X') - getDataFieldValue(testInfo.data, 'X')) > tol
error('Projection function does not produce correct projections on training set');
end
[testInfo, projectionInfo] = dualGeneralFeaturesProject(trainData, testData, subspaceInfo, params);
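%Recompute the test-set projections by hand using double deflation and compare
%with the output of dualGeneralFeaturesProject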
trainKj = trainK;
testX = getDataFieldValue(testData, 'X');
testTrainKj = testX*trainX';
newTestX = zeros(numTestExamples, T);
tau = getDataFieldValue(trainInfo.data, 'X');
b = subspaceInfo.X.b;
%Compute projections on test set
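%testTrainKj is deflated in step with trainKj so the test scores use the same
%deflated dual directions as the training scores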
for i=1:T
b(:, i) = dualMaxSparseVariance(trainK, trainKj, 0, (1:numTrainExamples)');
tau(:, i) = trainKj*b(:, i);
newTestX(:, i) = testTrainKj*b(:, i);
testTrainKj = testTrainKj * (I - b(:, i)*tau(:, i)'*trainKj/(tau(:, i)'*tau(:, i)) ...
    - tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i)) ...
    + b(:, i)*tau(:, i)'*trainKj*tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i))^2);
trainKj = (I - tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i))) * trainKj * (I - tau(:, i)*tau(:, i)'/(tau(:, i)'*tau(:, i)));
end
if norm(abs(tau\getDataFieldValue(trainInfo.data, 'X')) - eye(T)) > tol
error('For dual deflated general features, tau is incorrect');
end
if norm(abs(newTestX\getDataFieldValue(testInfo.data, 'X')) - eye(T)) > tol
error('Features computed on test set using dual deflation are incorrect');
end