% leastsquarestest.m — test script for the least-squares regressor API
%Test least squares: single-label case.
% Builds a random linear model Y = X*q plus zero-mean uniform noise,
% splits the data 2/3 train / 1/3 test, trains a least-squares regressor
% through both the data-struct API and the raw-matrix API, and prints the
% train/test correlations between true and predicted labels.
clear;               % plain 'clear' clears the workspace; 'clear all' would also wipe breakpoints, globals and mex caches
numExamples = 150;
numFeatures = 10;
noise = 0.5;         % amplitude of the uniform noise added to the labels
tol = 1e-5;          % NOTE(review): defined but never used in this script — confirm before removing
X = rand(numExamples, numFeatures);
X = centerData(X);
X = normalise(X);
q = rand(numFeatures, 1);   % ground-truth weight vector
% (rand - 0.5) gives zero-mean uniform noise in [-0.5, 0.5]
Y = centerData(X*q + noise*(rand(numExamples, 1)-0.5));
d = data;
d = addDataField(d, 'X', X, 'examples');
d = addDataField(d, 'Y', Y, 'labels');
[trainData, testData] = splitData2(d, 2/3);
params = struct;
[classifierInfo, trainInfo] = leastSquaresRegressorTrain(trainData, params);
[testInfo, predictionInfo] = leastSquaresRegressorPredict(trainData, testData, classifierInfo, params);
correlation(getDataFieldValue(trainData, 'Y'), trainInfo.predictedY)
correlation(getDataFieldValue(testData, 'Y'), testInfo.predictedY)
% Same model through the raw-matrix convenience wrapper; correlations
% should match the data-struct API results above.
[trainInfo, testInfo, classifierInfo] = leastSquaresRegressor(getDataFieldValue(trainData, 'X'), getDataFieldValue(trainData, 'Y'), getDataFieldValue(testData, 'X'), params);
correlation(getDataFieldValue(trainData, 'Y'), trainInfo.predictedY)
correlation(getDataFieldValue(testData, 'Y'), testInfo.predictedY)
% Predict on the training set itself — should reproduce the train correlation.
[testInfo, predictionInfo] = leastSquaresRegressorPredict(trainData, trainData, classifierInfo, params);
correlation(getDataFieldValue(trainData, 'Y'), testInfo.predictedY)
% BUG FIX: MATLAB passes by value, so the original 'clearAllFields(d);'
% discarded the result and left d unchanged. Capture the return value, in
% keeping with the d = addDataField(d, ...) convention used above.
% (assumes clearAllFields returns the cleared struct — TODO confirm;
% d is rebuilt from scratch below either way.)
d = clearAllFields(d);
%Test multi label case: same pipeline as above but with a matrix of labels
%(numLabels columns), reusing the centered/normalised X from the first test.
numLabels = 5;
q = rand(numFeatures, numLabels);
% Zero-mean uniform noise, one column of labels per target.
Y = centerData(X*q + noise*(rand(numExamples, numLabels)-0.5));
d = data;
d = addDataField(d, 'X', X, 'examples');
d = addDataField(d, 'Y', Y, 'labels');
[trainData, testData] = splitData2(d, 2/3);
[classifierInfo, trainInfo] = leastSquaresRegressorTrain(trainData, params);
[testInfo, predictionInfo] = leastSquaresRegressorPredict(trainData, testData, classifierInfo, params);
% Train and test correlations between true and predicted label matrices.
correlation(getDataFieldValue(trainData, 'Y'), trainInfo.predictedY)
correlation(getDataFieldValue(testData, 'Y'), testInfo.predictedY)