⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 dualsparsegeneralfeatureslm3test.m

📁 a function inside machine learning
💻 M
字号:
%Test script to find problems in dualSparseGeneralFeaturesLM3 by comparing
%its output against the reference implementation dualSparseGeneralFeaturesLM
%on the ionosphere dataset.
clear;
rand('state',22); %Fix the legacy RNG state so sampling/splitting is reproducible
dataSet = 'ionosphere';
csvFileName = sprintf('%s.data', dataSet);

[X, y, numExamples, numFeatures] = readCsvData(csvFileName);

%Centre and normalise the data before subsampling
X = centerData(X);
X = normalise(X);

%Keep at most 100 examples, then split 2/3 train, 1/3 test
numExamples = min(100, numExamples);
[X, y] = sampleData(X, y, numExamples);
[trainX, trainY, testX, testY] = splitData(X, y, 2/3);

numTrainExamples = size(trainX, 1);
numTestExamples = size(testX, 1);

tol = 10^-2; %Tolerance when comparing the two implementations
T = 10;      %Number of feature-extraction iterations

%Parameters passed to both feature-extraction implementations
params.dualFeatureDirection = 'dualMaxSparseAlignmentLM';
params.dualSparseMeasureFunction = 'sparseAlignments';
params.iterations = T;
params.kernelFunctionName = 'linearKernel';
params.chunkSize = 500;
params.cacheSize = 500;
params.sigma = 1;
params.normalise = 1;

%Do some simple tests first: the new implementation must agree with the old one

[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, testX, params);
[newTrainX2, newTestX2, subspaceInfo2] = dualSparseGeneralFeaturesLM(trainX, trainY, testX, params);


if norm(newTrainX - newTrainX2) > tol
    error('Features on training set do not correspond with dualSparseGeneralFeaturesLM');
end

if norm(newTestX - newTestX2) > tol
    error('Features on testing set do not correspond with dualSparseGeneralFeaturesLM');
end

%NOTE(review): the field access is asymmetric (subspaceInfo.X.b vs
%subspaceInfo2.b) - presumably the two implementations store the dual
%projections differently; confirm against the function definitions
if norm(full(subspaceInfo.X.b - subspaceInfo2.b)) > tol
	error('Dual projections do not correspond with dualSparseGeneralFeaturesLM');
end

%Use short-circuit || rather than element-wise | inside an if condition
if size(newTrainX, 1) ~= numTrainExamples || size(newTestX, 1) ~= numTestExamples
    error('Not returning the correct number of examples');
end

%For orthogonal columns the Gram matrix X'*X is diagonal, so the sum of all
%its entries equals its trace. Take abs of the difference: the original
%one-sided test (trace - sum > 0.5) silently passed whenever the off-diagonal
%mass was positive, since trace - sum equals minus the off-diagonal sum.
if abs(trace(newTrainX'*newTrainX) - sum(sum(newTrainX'*newTrainX))) > 0.5
    error('Training data is not orthogonal');
end

if abs(trace(newTestX'*newTestX) - sum(sum(newTestX'*newTestX))) > 0.5
    error('Testing data is not orthogonal');
end

%Each extracted feature should be unit norm when params.normalise == 1
for i=1:T
    if abs(norm(newTrainX(:, i)) - 1) > tol
        error('Feature number %d is not normalised', i);
    end
end

%Now test the ability to drop out by using all ones kernel
%(an RBF kernel with a very large sigma approximates the all-ones kernel)
params.kernelFunctionName = 'rbfKernel';
params.sigma = 100;

%Smoke test: should run without error under the near-constant kernel
[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, testX, params);

%Now test iterating past rank of data: request more iterations than the
%data rank supports and check the implementation stops early
T = rank(trainX) + 10; 
params.kernelFunctionName = 'linearKernel';
params.iterations = T;
[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, testX, params);

if size(newTestX, 2) >= T 
    error('Seems to be iterating past the rank of the data'); 
end 

%Projecting the training set as if it were a test set must reproduce the
%training features
[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, trainX, params);

if norm(newTrainX - newTestX) > tol 
    error('Features generated incorrectly on test set'); 
end 

%Now test turning normalisation off: both implementations must still agree
T = 10; 
params.normalise = 0;
params.iterations = T;

[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, testX, params);
[newTrainX2, newTestX2, subspaceInfo2] = dualSparseGeneralFeaturesLM(trainX, trainY, testX, params);


if norm(newTrainX - newTrainX2) > tol 
    error('Features on unnormalised training set do not correspond with dualSparseGeneralFeaturesLM'); 
end 

if norm(newTestX - newTestX2) > tol
    error('Features on unnormalised testing set do not correspond with dualSparseGeneralFeaturesLM'); 
end 

%Try some sparse data: the function should accept sparse matrices without
%error (smoke test only - no assertion follows the call)
trainX = sparse(trainX); 
trainY = sparse(trainY); 
testX = sparse(testX); 
[newTrainX, newTestX, subspaceInfo] = dualSparseGeneralFeaturesLM3(trainX, trainY, testX, params);

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -