new_start_classify.m
% ------------------------------------------------
% This method has a much be
%
%disp('In new_start_classify');

myalgorithm = [algorithm '_VC'];
train_err   = [];
test_err    = [];

[Nclasses, classes] = find_classes(targets);

for i = 1:redraws,
    set(hm, 'String', ['Processing iteration ' num2str(i) ' of ' num2str(redraws) ' iterations...']);

    % Make a draw according to the error method chosen
    L = length(targets);
    switch error_method
    case cellstr('Resubstitution')
        test_indices  = 1:L;
        train_indices = 1:L;
    case cellstr('Holdout')
        [test_indices, train_indices] = make_a_draw(floor(percent/100*L), L);
        ...
    case cellstr('Cross-Validation')
        fprintf('Cross-validation with %d redraws\n', redraws);
        chunk         = floor(L/redraws);
        test_indices  = 1 + (i-1)*chunk : i*chunk;
        train_indices = [1:(i-1)*chunk, i*chunk + 1:L];
    end

    train_features = features(:, train_indices);
    train_targets  = targets(:, train_indices);
    test_features  = features(:, test_indices);
    test_targets   = targets(:, test_indices);
    train_err(:,i) = zeros(Nclasses+1, 1);
    test_err(:,i)  = zeros(Nclasses+1, 1);

    % fprintf('have %d training and %d test samples, fraction %f\n', length(train_targets), length(test_targets), percent);

    % Preprocess and then find decision region
    switch preprocessing
    case cellstr('None')
        [train_err(:,i), test_err(:,i)] = feval(myalgorithm, train_features, ...
            train_targets, AlgorithmParameters, test_features, test_targets);

    case cellstr('PCA')
        % preprocessing with PCA
        disp('Performing preprocessing with PCA')
        [reduced_features, reduced_targets, UW] = ...
            feval(preprocessing, train_features, train_targets, PreprocessingParameters, region);
        [train_err(:,i), test_err(:,i)] = feval(myalgorithm, reduced_features, ...
            train_targets, AlgorithmParameters, UW*test_features, test_targets);

    % ================================================================
    case cellstr('Fishers_linear_discriminant')
        disp('Performing preprocessing with Fisher''s linear discriminant')
        [reducedFeatures, reducedTargets, w] = feval(preprocessing, train_features, train_targets, [], region);
        % Project the test features; the targets are unchanged by the projection
        reducedTestFeatures = [w' * test_features; zeros(1, length(test_targets))];
        [train_err(:,i), test_err(:,i)] = feval(myalgorithm, reducedFeatures, ...
            reducedTargets, AlgorithmParameters, reducedTestFeatures, test_targets);

        % Replot the data
        hold off
        plot_scatter([w'*features; zeros(1, length(targets))], targets, hParent)
        hold on

    otherwise
        fprintf('Performing preprocessing with %s\n', preprocessing);
        % performing separate preprocessing
        if strcmp(get(findobj(gcbf, 'Tag', ...
                '&Options&SeparatePreprocessing'), 'Checked'), 'on'),
            %
            % MUST TAKE CARE OF THIS
            disp('Perform separate preprocessing for each class.')
            in0 = find(train_targets == 0);
            in1 = find(train_targets == 1);
            [reduced_features0, reduced_targets0] = ...
                feval(preprocessing, train_features(:,in0), ...
                      train_targets(in0), PreprocessingParameters, region, ...
                      plot_on);
            [reduced_features1, reduced_targets1] = ...
                feval(preprocessing, train_features(:,in1), ...
                      train_targets(in1), PreprocessingParameters, region, ...
                      plot_on);
            reduced_features = [reduced_features0, reduced_features1];
            reduced_targets  = [reduced_targets0,  reduced_targets1];
        else
            % or not
            [reduced_features, reduced_targets] = ...
                feval(preprocessing, train_features, train_targets, ...
                      PreprocessingParameters, region, plot_on);
        end

        pause(1);
        plot_process([]);

        indices          = find(sum(isfinite(reduced_features)) > 0);
        reduced_features = reduced_features(:,indices);
        reduced_targets  = reduced_targets(:,indices);

        if (i == redraws)
            % Plot only during the last iteration
            plot_scatter(reduced_features, reduced_targets, hParent, 1)
            axis(region(1:4))
        end

        if ((sum(reduced_targets) <= 1) & (sum(~reduced_targets) <= 1) & (~strcmp(algorithm,'None')))
            error('Too few reduced points (This program needs at least two points of each class). Please restart.')
        else
            if strcmp(algorithm,'None'),
                % No classification was asked for
                set(gcf, 'pointer', 'arrow');
                return
            end
        end

        disp('Generating decision region')
        D = feval(algorithm, reduced_features, ...
                  reduced_targets, AlgorithmParameters, region);

        disp('Calculating the error')
        [train_err(:,i), test_err(:,i)] = ...
            calculate_error(D, train_features, train_targets, ...
                            test_features, test_targets, region, Nclasses);
    end
end

% Display error
%% ====================
h = findobj('Tag', 'TestSetError');
s = 'Test set errors: ';
fprintf('\n Test Set Error Rates\n');
for j = 1:Nclasses,
    s = [s 'Class ' num2str(j) ': ' num2str(mean(test_err(j,:)),2) '. '];
    fprintf('Class %d - Error Rate %f\n', j, mean(test_err(j,:)));
end
m_m = mean(test_err(Nclasses+1,:));
fprintf('Overall Error Rate %f\n\n', m_m);
s = [s 'Total: ' num2str(mean(test_err(Nclasses+1,:)),2)];
set(h, 'String', s);

%% ====================
h = findobj('Tag', 'TrainSetError');
s = 'Train set errors: ';
fprintf('\n Training Set Error Rates\n');
for j = 1:Nclasses,
    s = [s 'Class ' num2str(j) ': ' num2str(mean(train_err(j,:)),2) '. '];
    fprintf('Class %d - Error Rate %f\n', j, mean(train_err(j,:)));
end
m_m = mean(train_err(Nclasses+1,:));
fprintf('Overall Error Rate %f\n', m_m);
s = [s 'Total: ' num2str(mean(train_err(Nclasses+1,:)),2)];
set(h, 'String', s);

% Show Bayes decision region and error (if possible)
% grid on

% Replot training points if necessary
if strcmp(get(findobj(gcbf, 'Label', 'Show &Training points'), 'Checked'), 'on'),
    plot_scatter(train_features, train_targets, hParent, 2)
end
hold off

% That's all folks!
s = 'Finished!';
if (redraws > 1),
    s = [s ' (Note that only the last decision region is shown)'];
end
set(hm, 'String', s);
set(gcf, 'pointer', 'arrow');
echo off;
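
For reference, the index partitioning used by the 'Cross-Validation' branch above can be tried on its own. The following is a minimal sketch, not part of new_start_classify.m: it reuses the same chunking arithmetic (chunk, test_indices, train_indices), with made-up demo values for the sample count L and the number of redraws, and simply prints each fold's split.

% Illustrative sketch only -- not part of new_start_classify.m.
% L and redraws below are arbitrary demo values.
L       = 10;                                 % total number of samples (demo value)
redraws = 5;                                  % number of cross-validation folds (demo value)
chunk   = floor(L/redraws);                   % samples held out per fold
for i = 1:redraws
    test_indices  = 1 + (i-1)*chunk : i*chunk;        % current held-out fold
    train_indices = [1:(i-1)*chunk, i*chunk + 1:L];   % all remaining samples
    fprintf('Fold %d: test = [%s], train = [%s]\n', i, ...
        num2str(test_indices), num2str(train_indices));
end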