% parzen_vc.m
% Learns a classifier and classifies the test set using Parzen windows.
%
% Usage:
% [trainError, testError, hatTrainLabels, hatTestLabels] = ...
%     Parzen_VC(trainFeatures, trainLabels, params, testFeatures, testLabels)
%
% Inputs:
%   trainFeatures - the training set vectors, one vector per column
%   trainLabels   - the labels of the above
%   params        - string packing hParzen (the window width) and
%                   Kernel (the kernel name)
%   testFeatures  - test set, one column per vector
%   testLabels    - labels for the test set
%
% Outputs:
%   trainError     - the error rate on the training set (one entry per
%                    class + total error)
%   testError      - the error rate on the test set (one entry per
%                    class + total error)
%   hatTrainLabels - the labels produced by the algorithm for the
%                    training samples
%   hatTestLabels  - the labels produced by the algorithm for the
%                    test samples

function [trainError, testError, hatTrainLabels, hatTestLabels] = ...
    Parzen_VC(trainFeatures, trainLabels, params, testFeatures, testLabels)

% Unpack the window width and the kernel name from the params string
comma_loc = findstr(params, ',');
hParzen   = str2num(params(2:comma_loc(1)-1));
kernel    = params(comma_loc(1)+2:length(params)-2);
alpha     = 0.9;   % (unused)

[Ndim, Nsam] = size(trainFeatures);
[Nclasses, classes] = find_classes([trainLabels(:); testLabels(:)]); % Number of classes in labels

if ~(strcmp(kernel, 'Epanechnikov') | strcmp(kernel, 'Tri-cube') ...
     | strcmp(kernel, 'Gaussian') | strcmp(kernel, 'Bell'))
   fprintf('Kernel %10s unsupported\n', kernel);
   fprintf('We currently support only the following kernels\n');
   fprintf('   Epanechnikov\n');
   fprintf('   Tri-cube\n');
   fprintf('   Gaussian\n');
   fprintf('   Bell\n');
   trainError     = ones(1, Nclasses+1);
   testError      = ones(1, Nclasses+1);
   hatTrainLabels = -ones(size(trainLabels));
   hatTestLabels  = -ones(size(testLabels));
   return;
end

hm = findobj('Tag', 'Messages');
fprintf('Parzen: Training\n');
if ~isempty(hm)
   set(hm, 'String', 'Parzen: Training');
   refresh;
end

% Per-class adaptive window width: scale hParzen by an estimate of the
% class standard deviation
h_factor = hParzen;
for cl = 1:Nclasses
   trainForClass = find(trainLabels == classes(cl));  % samples of this class
   classFeatures = trainFeatures(:, trainForClass);
   n(cl) = length(trainForClass);
   if (n(cl) > 0)
      covariance = cov(classFeatures') / sqrt(Nsam);  % class covariance, scaled down by sqrt(Nsam)
   else
      covariance = diag(ones(1, Ndim), 0);
   end
   priors(cl) = n(cl);                    % class counts, normalized below
   sigma(cl)  = sqrt(sum(diag(covariance)));
   h(cl)      = sigma(cl) * h_factor;
end

priors = priors ./ length(trainLabels);   % turn counts into prior probabilities
[foo, bestGuess] = max(priors);           % fallback class when no window covers a sample
bestGuess = classes(bestGuess);

fprintf('Parzen (%s): Computing Training Set Error\n', kernel);
if ~isempty(hm)
   set(hm, 'String', sprintf('Parzen (%s), Computing Error on Training Set', kernel));
   refresh;
end

for sam = 1:Nsam
   thisSample = trainFeatures(:, sam);
   for cl = 1:Nclasses
      trainForClass = find(trainLabels == classes(cl));  % samples of this class
      classFeatures = trainFeatures(:, trainForClass);
      Onen  = ones(1, n(cl));
      % Squared distance from this sample to every training sample of
      % the class, normalized by the class window width
      dist2 = zeros(1, n(cl));
      for dim = 1:Ndim
         dist2 = dist2 + (classFeatures(dim,:) - Onen*thisSample(dim)).^2;
      end
      dist2 = dist2 / (h(cl)^2);
      if strcmp(kernel, 'Epanechnikov')
         dist2 = (1 - dist2) .* (dist2 < 1);
      elseif strcmp(kernel, 'Tri-cube')
         dist2 = ((1 - dist2.^(3/2)).^3) .* (dist2 < 1);
      elseif strcmp(kernel, 'Gaussian')
         dist2 = exp(-dist2/2);           % ignore the constants
      elseif strcmp(kernel, 'Bell')
         dist2 = exp(-(1 - sqrt(dist2)).^(-2)) .* (dist2 < 1);
      end
      % Now we add the contributions up, and have the (unnormalized)
      % local posterior density estimate for this class
      classPosterior(cl) = sum(dist2);
   end
   [m, ind] = max(classPosterior);
   if (m == 0)
      hatTrainLabels(sam) = bestGuess;    % no window covered the sample
   else
      hatTrainLabels(sam) = classes(ind);
   end
end
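% The four supported kernels, written as functions of the squared
% normalized distance u2 = ||x - x_i||^2 / h(cl)^2 (multiplicative
% constants are dropped throughout, since only the argmax over classes
% matters for classification):
%   Epanechnikov:  (1 - u2)                    for u2 < 1, else 0
%   Tri-cube:      (1 - u2^(3/2))^3            for u2 < 1, else 0
%   Gaussian:      exp(-u2 / 2)
%   Bell:          exp(-(1 - sqrt(u2))^(-2))   for u2 < 1, else 0
% The same scoring loop is now repeated for the test set.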
fprintf('Parzen (%s): Computing Test Set Error\n', kernel);
if ~isempty(hm)
   set(hm, 'String', sprintf('Parzen (%s): Computing Error on Test Set', kernel));
   refresh;
end

[Ndim, Nsam] = size(testFeatures);
for sam = 1:Nsam
   thisSample = testFeatures(:, sam);
   for cl = 1:Nclasses
      trainForClass = find(trainLabels == classes(cl));  % samples of this class
      classFeatures = trainFeatures(:, trainForClass);
      Onen  = ones(1, n(cl));
      % Squared distance from this test sample to every training sample
      % of the class, normalized by the class window width
      dist2 = zeros(1, n(cl));
      for dim = 1:Ndim
         dist2 = dist2 + (classFeatures(dim,:) - Onen*thisSample(dim)).^2;
      end
      dist2 = dist2 / (h(cl)^2);
      if strcmp(kernel, 'Epanechnikov')
         dist2 = (1 - dist2) .* (dist2 < 1);
      elseif strcmp(kernel, 'Tri-cube')
         dist2 = ((1 - dist2.^(3/2)).^3) .* (dist2 < 1);
      elseif strcmp(kernel, 'Gaussian')
         dist2 = exp(-dist2/2);           % ignore the constants
      elseif strcmp(kernel, 'Bell')
         dist2 = exp(-(1 - sqrt(dist2)).^(-2)) .* (dist2 < 1);
      end
      % Sum the kernel contributions: the (unnormalized) local posterior
      % density estimate for this class
      classPosterior(cl) = sum(dist2);
   end
   [m, ind] = max(classPosterior);
   if (m == 0)
      hatTestLabels(sam) = bestGuess;     % no window covered the sample
   else
      hatTestLabels(sam) = classes(ind);
   end
end

trainError = computeError(classes, trainLabels, hatTrainLabels);
testError  = computeError(classes, testLabels,  hatTestLabels);
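% ------------------------------------------------------------------
% Usage sketch (not part of the original toolbox file): a minimal,
% hypothetical call on two synthetic 2-D Gaussian classes. It assumes
% find_classes and computeError from the same toolbox are on the path,
% and that params packs the window width and kernel name in the
% bracketed format the string slicing above expects; verify the exact
% quoting against the calling GUI before relying on it.
%
%   trainFeatures = [randn(2,50), randn(2,50)+2];   % two labeled blobs
%   trainLabels   = [zeros(1,50), ones(1,50)];
%   testFeatures  = [randn(2,20), randn(2,20)+2];
%   testLabels    = [zeros(1,20), ones(1,20)];
%   [trainErr, testErr] = Parzen_VC(trainFeatures, trainLabels, ...
%       '[0.5, ''Gaussian'']', testFeatures, testLabels);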