⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 fldakernel.m

📁 一个matlab的工具包,里面包括一些分类器 例如 KNN KMEAN SVM NETLAB 等等有很多.
💻 M
字号:
function [Y_compute, Y_prob] = FLDAKernel(para, X_train, Y_train, X_test, Y_test, num_class)
% FLDAKernel: binary kernel Fisher linear discriminant classifier.
%   para      - parameter string; parsed for -Kernel (0 linear, 1 poly, 2 RBF),
%               -KernelParam, and -RegFactor.
%   X_train   - training data; when empty, the model is loaded from
%               temp_model_file instead of being trained.
%   Y_train   - training labels (two classes).
%   X_test    - test data; Y_test is used only for output sizing.
%   num_class - must be <= 2; this implementation is binary only.
% Returns the predicted labels Y_compute (values from the class set) and
% logistic-squashed scores Y_prob in (0, 1).

global temp_model_file preprocess;

Y_compute = zeros(size(Y_test)); Y_prob = zeros(size(Y_test));
if (num_class > 2)
    % Was 'PerceptronKernel:' - copy-paste error from another classifier.
    error('FLDAKernel: The class number is larger than 2!');
end;

class_set = GetClassSet(Y_train);
p = str2num(char(ParseParameter(para, {'-Kernel';'-KernelParam';'-RegFactor'}, {'0';'0.05';'0'}, 1)));
KernelType = p(1);
KernelPara = p(2); 
RegFactor = p(3);

beta = []; 
if (~isempty(X_train)),
    % Convert the binary labels into +/-1
    Y_train = (Y_train == class_set(1)) - (Y_train ~= class_set(1));
    [beta, mu1, mu2] = LearnFLDAKernel(Y_train, X_train, KernelType, KernelPara, RegFactor);
    % Persist the model: header lines, then one "beta, x1, ..., xd" row per
    % training point (the kernel expansion needs the training data back).
    fid = fopen(temp_model_file, 'w');
    if (fid > 0),
        fprintf('Writing to %s .... \n', temp_model_file);  
        fprintf(fid, 'File: %s\n', preprocess.input_file); 
        fprintf(fid, 'N: %d\n', size(Y_train, 1)); 
        fprintf(fid, '%d ', class_set); fprintf(fid, '\n');
        fprintf(fid, '%f,%f', mu1, mu2); fprintf(fid, '\n');        
        format_str = ''; 
        for i = 1:size(X_train,2)+1, format_str = strcat(format_str, '%f,'); end;
        format_str = strcat(format_str, '\n');
        fprintf(fid, format_str, [beta X_train]');
        fclose(fid);    
    end;
else
    % Test-only invocation: reload beta, the training points, and the
    % projected class means from the model file written above.
    fid = fopen(temp_model_file, 'r');
    if (fid > 0),
        fgets(fid);
        line = fgetl(fid); num = sscanf(line, 'N: %d');
        line = fgetl(fid); class_set = sscanf(line, '%d');      
        % fscanf's second output is the element COUNT, not a second value;
        % the original [mu1, mu2] = fscanf(...) left mu1 a 2-vector and
        % mu2 == 2. Unpack the returned vector explicitly instead.
        mus = fscanf(fid, '%f,%f', 2);
        mu1 = mus(1); mu2 = mus(2);
        input = fscanf(fid, '%f,');
        input = reshape(input, [], num)';
        beta = input(:, 1); X_train = input(:, 2:size(input, 2));
        fclose(fid);    
        preprocess.ClassSet = class_set;
    end;    
end;

Logit_Y_prob = PredictFLDAKernel(beta, X_train, X_test, KernelType, KernelPara, mu1, mu2);

% Squash the discriminant score into (0, 1); positive score -> class_set(1).
Y_prob = exp(Logit_Y_prob) ./ (1 + exp(Logit_Y_prob));
Y_compute = class_set(1) * (Logit_Y_prob >= 0) + class_set(2) * (Logit_Y_prob < 0);

% Learning 

function [beta, mu1, mu2] = LearnFLDAKernel(Y_train, X_train, KernelType, KernelPara, RegFactor)   
% Train a kernel Fisher discriminant on +/-1 labels.
%   KernelType - 0: linear, 1: polynomial of degree KernelPara,
%                2: RBF with width KernelPara.
%   RegFactor  - ridge term added to the within-class scatter before the
%                generalized eigenproblem (keeps it invertible).
% Returns the kernel expansion coefficients beta (one per training point)
% and the means mu1/mu2 of the projected positive/negative training outputs.

extx = X_train;
mextx = size(extx, 1);
% Build the training kernel matrix
switch (KernelType) 
    case 0 
        kernel = extx * extx'; 
    case 1
        kernel = (1 + extx * extx') .^ KernelPara;
    case 2
        % zeros, not eye: every column is overwritten below anyway
        kernel = zeros(mextx);
        for i = 1:mextx
            k = repmat(extx(i, :), mextx, 1) - extx;
            kernel(:, i) = sum(k .* k, 2);
        end;
        kernel = exp(- kernel / (2 * KernelPara ^2));
end;

% Per-class and overall means of the kernel columns
Kc = zeros(mextx, 2);
Kc(:, 1) = mean(kernel(:, Y_train == 1), 2);
Kc(:, 2) = mean(kernel(:, Y_train == -1), 2);
K = mean(kernel, 2);

num_class = 2;
num_pos = sum(Y_train == 1);
num_neg = sum(Y_train == -1);
% Between-class and within-class scatter in the kernel-induced space
SB = Kc * Kc' - num_class * K * K';
SW = kernel * kernel - num_pos * Kc(:, 1) * Kc(:, 1)' - num_neg * Kc(:, 2) * Kc(:, 2)';
% Maximize beta'*SB*beta / beta'*(SW + reg*I)*beta: take the generalized
% eigenvector with the largest eigenvalue.
[V, D] = eig(SB, SW + RegFactor * eye(size(SW)));
[junk, ind] = max(diag(D));
beta = V(:, ind);

% Class means of the projected training outputs (used at prediction time)
L_output = kernel * beta; 
mu1 = mean(L_output(Y_train == 1));
mu2 = mean(L_output(Y_train == -1));

% Prediction 
function [L_output, kernel] = PredictFLDAKernel(beta, D_train, D_test, KernelType, KernelPara, mu1, mu2)
% Score test points with a trained kernel FLDA model.
%   beta    - kernel expansion coefficients over D_train.
%   mu1/mu2 - projected means of the positive/negative training classes.
% Per test point: (proj - mu2)^2 - (proj - mu1)^2, which is positive when
% the projection is closer to the positive-class mean mu1.

% MATLAB is case-sensitive: the original assigned lowercase
% kerneltype/kernelpara here, so these defaults never took effect.
if nargin < 4, KernelType = 0; end;
if nargin < 5, KernelPara = 0; end;

switch (KernelType) 
    case 0 
        kernel = D_test * D_train'; 
    case 1
        kernel = (1 + D_test * D_train') .^ KernelPara;
    case 2
        num_test = size(D_test, 1);
        num_train = size(D_train, 1);        
        kernel = zeros(num_test, num_train);
        for i = 1:num_test
            for j = 1:num_train
                % squared Euclidean distance between test i and train j
                kernel(i, j) = (D_test(i, :) - D_train(j, :)) * (D_test(i, :) - D_train(j, :))';
            end;
        end;
        kernel = exp(- kernel / (2 * KernelPara ^2));
end;
L_output = kernel * beta;  % projection onto the discriminant direction
L_output = (L_output - mu2) .^ 2 - (L_output - mu1) .^ 2;

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -