% perceptronkernel.m
function [Y_compute, Y_prob] = PerceptronKernel(para, X_train, Y_train, X_test, Y_test, num_class)
% PerceptronKernel  Binary kernel-perceptron classifier with model persistence.
%
%   para      - parameter string; recognized options (defaults in parens):
%               -Kernel (0: linear, 1: polynomial, 2: RBF), -KernelParam (0.05),
%               -CostFactor (1; accepted but unused by the learner), -MaxIter (100)
%   X_train   - training feature matrix, one example per row; when EMPTY the
%               model is instead loaded from the global temp_model_file
%   Y_train   - training labels (at most two distinct classes)
%   X_test    - test feature matrix
%   Y_test    - test labels (used here only to size the outputs)
%   num_class - number of classes; must be <= 2 or an error is raised
%   Y_compute - predicted class labels for X_test
%   Y_prob    - logistic probability of class_set(1) for each test example
%
% Side effects: writes (training mode) or reads (test-only mode) the model
% file named by global temp_model_file, and reads/updates global preprocess.
global temp_model_file preprocess;
Y_compute = zeros(size(Y_test)); Y_prob = zeros(size(Y_test));
if (num_class > 2)
    error('PerceptronKernel: The class number is larger than 2!');
end;
class_set = GetClassSet(Y_train);
p = str2num(char(ParseParameter(para, {'-Kernel';'-KernelParam'; '-CostFactor'; '-MaxIter'}, {'0';'0.05';'1';'100'}, 1)));
KernelType = p(1);
KernelPara = p(2);
CostFactor = p(3);
MaxIter = p(4);
% Append a constant-1 column so the bias term is folded into the kernel.
X_train_ext = [X_train ones(size(X_train, 1), 1)];
X_test_ext = [X_test ones(size(X_test, 1), 1)];
X_ext = X_train_ext;
beta = [];
if (~isempty(X_train)),
    % Training mode: map the two labels onto +/-1, learn the dual weights,
    % then persist the model (beta plus the training examples) to disk.
    Y_train = (Y_train == class_set(1)) - (Y_train ~= class_set(1));
    beta = LearnPerceptKernel(Y_train, X_train_ext, KernelType, KernelPara, CostFactor, MaxIter);
    fid = fopen(temp_model_file, 'w');
    if (fid > 0),
        fprintf('Writing to %s .... \n', temp_model_file);
        fprintf(fid, 'File: %s\n', preprocess.input_file);
        fprintf(fid, 'N: %d\n', size(Y_train, 1));
        fprintf(fid, '%d ', class_set); fprintf(fid, '\n');
        % One comma-separated row per training example: [beta_i, x_i].
        format_str = '';
        for i = 1:size(X_ext,2)+1, format_str = strcat(format_str, '%f,'); end;
        format_str = strcat(format_str, '\n');
        fprintf(fid, format_str, [beta X_ext]');
        fclose(fid);
    end;
else
    % Test-only mode: reload beta, the stored training examples, and the
    % class set from the model file written by a previous training run.
    fid = fopen(temp_model_file, 'r');
    if (fid > 0),
        fgets(fid);  % skip the "File: ..." header line
        line = fgetl(fid); num = sscanf(line, 'N: %d');
        line = fgetl(fid); class_set = sscanf(line, '%d');
        input = fscanf(fid, '%f,');
        input = reshape(input, [], num)';
        beta = input(:, 1); X_ext = input(:, 2:size(input, 2));
        fclose(fid);
        preprocess.ClassSet = class_set;
    end;
end;
Logit_Y_prob = PredictPerceptKernel(beta, X_ext, X_test_ext, KernelType, KernelPara);
% Logistic link. 1./(1+exp(-z)) is numerically stable for any z, unlike
% exp(z)./(1+exp(z)) which evaluates to NaN (Inf/Inf) for large positive z.
Y_prob = 1 ./ (1 + exp(-Logit_Y_prob));
% Non-negative score -> class_set(1), negative score -> class_set(2).
Y_compute = class_set(1) * (Logit_Y_prob >= 0) + class_set(2) * (Logit_Y_prob < 0);
% Learning
function beta = LearnPerceptKernel(Y_train, X_train_ext, KernelType, KernelPara, CostFactor, MaxIter)
% LearnPerceptKernel  Train a kernel perceptron in the dual representation.
%   Y_train     - labels in {+1, -1}, one per training example
%   X_train_ext - training examples (rows), bias column already appended
%   KernelType  - 0: linear, 1: polynomial of degree KernelPara,
%                 2: RBF with width KernelPara
%   KernelPara  - kernel parameter (polynomial degree or RBF width)
%   CostFactor  - accepted for interface compatibility; not used by this learner
%   MaxIter     - maximum number of full passes over the training data
%   beta        - dual weights, one per training example
extx = X_train_ext;
mextx = size(extx, 1);
% Build the training-set kernel matrix.
switch (KernelType)
    case 0
        kernel = extx * extx';
    case 1
        kernel = (1 + extx * extx') .^ KernelPara;
    case 2
        % Preallocate with zeros (every column is fully overwritten below;
        % the original eye() preallocation was misleading).
        kernel = zeros(mextx);
        for i = 1:mextx
            d = extx - repmat(extx(i, :), mextx, 1);
            kernel(:, i) = sum(d .* d, 2);  % squared Euclidean distances
        end;
        kernel = exp(- kernel / (2 * KernelPara ^ 2));
end;
% Dual perceptron: bump beta(i) by y_i whenever example i is misclassified.
beta = zeros(mextx, 1);
for t = 1:MaxIter,
    all_correct = true;
    for i = 1:mextx,
        if (Y_train(i) .* (kernel(i, :) * beta)) <= 0,
            beta(i) = beta(i) + Y_train(i);
            all_correct = false;
        end;
    end;
    if (all_correct), break; end;  % converged: a full pass with no mistakes
end;
% Prediction
function [L_output, kernel] = PredictPerceptKernel(beta, D_train, D_test, KernelType, KernelPara)
% PredictPerceptKernel  Score test examples with a trained kernel perceptron.
%   beta       - dual weights from LearnPerceptKernel (one per training row)
%   D_train    - stored training examples (rows), bias column included
%   D_test     - test examples (rows), bias column included
%   KernelType - 0: linear (default), 1: polynomial, 2: RBF
%   KernelPara - kernel parameter (default 0)
%   L_output   - real-valued scores: kernel(D_test, D_train) * beta
%   kernel     - the test-vs-train kernel matrix
%
% Bug fix: the defaults were previously assigned to lowercase names
% kerneltype/kernelpara; MATLAB identifiers are case-sensitive, so the
% defaults never took effect and calls with < 5 arguments failed with an
% undefined-variable error.
if (nargin < 4), KernelType = 0; end;
if (nargin < 5), KernelPara = 0; end;
switch (KernelType)
    case 0
        kernel = D_test * D_train';
    case 1
        kernel = (1 + D_test * D_train') .^ KernelPara;
    case 2
        num_test = size(D_test, 1);
        num_train = size(D_train, 1);
        kernel = zeros(num_test, num_train);
        % Squared Euclidean distances, vectorized one test row at a time.
        for i = 1:num_test
            d = D_train - repmat(D_test(i, :), num_train, 1);
            kernel(i, :) = sum(d .* d, 2)';
        end;
        kernel = exp(- kernel / (2 * KernelPara ^ 2));
end;
L_output = kernel * beta;