
ada_demo

PURPOSE

% AdaBoost Classifier Demo

SYNOPSIS

This is a script file.

DESCRIPTION

AdaBoost Classifier Demo
Several ways to use the AdaBoost algorithm classifier.

CROSS-REFERENCE INFORMATION

This function calls:
This function is called by:

SOURCE CODE

0001 %% AdaBoost Classifier Demo
0002 % Several ways to use the AdaBoost algorithm classifier
0003 %%
0004 %% Load dataset
0005 
0006 % Name of matlab dataset
0007 % Available names : [cancer, wine, iris, crab, glass, simpleclass, thyroid]
0008 datasetName = 'iris';
0009 
0010 % This is just an example of loading the built-in MATLAB sample datasets;
0011 % you can load data from other sources in other ways, as long as you
0012 % provide x, the training-instance matrix of size
0013 % (Number of Instances, Number of Features), and y, the label matrix of
0014 % size (Number of Instances, 1). An alternative-loading sketch follows below.
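% For example, a minimal sketch of loading from a plain CSV file instead
% (the file name 'mydata.csv' and its layout, features in the leading columns
% and an integer class label in the last column, are assumptions, not part of
% this demo):
%
%   data = readmatrix('mydata.csv');   % use csvread on releases before R2019a
%   x = data(:, 1:end-1);              % (numInstances x numFeatures)
%   y = data(:, end);                  % (numInstances x 1), labels in 1..numClasses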
0015 load(strcat(datasetName, '_dataset'));
0016 eval(sprintf('x = %sInputs;', datasetName));
0017 eval(sprintf('y = %sTargets;', datasetName));
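% For datasetName = 'iris', the two eval calls above expand to
% x = irisInputs; and y = irisTargets;, the variables provided by
% load('iris_dataset').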
0018 x = x';
0019 y = y';
0020 numClasses = size(y, 2);
0021 [~,y] = max(y,[],2);
0022 
0023 numFeatures = size(x, 2);
0024 numInstances = size(x, 1);
0025 
0026 % display dataset info
0027 disp(['Dataset Name ' datasetName]);
0028 disp(['Number of Classes ' num2str(numClasses)]);
0029 disp(['Number of Instances ' num2str(numInstances)]);
0030 disp(['Number of Features ' num2str(numFeatures)]);
0031 
0032 
0033 %% Basic Usage
0034 
0035 fprintf('===========\n');
0036 fprintf('Basic Usage\n');
0037 fprintf('===========\n');
0038 
0039 % Create an AdaBoost classifier using SVM as the weak classifier
0040 adacl = AdaBooster(SVMClassifier(numClasses));
0041 
0042 % Train the AdaBoost classifier
0043 fprintf('\tTraining Classifier for 3 iterations\n');
0044 [adacl, learnErr] = learn(adacl, x, y, 3);
0045 fprintf('\tLearning Error %f\n', learnErr);
0046 
0047 fprintf('\t-----\n');
0048 fprintf('\tTesting Classifier\n');
0049 outs = computeOutputs(adacl, x);
0050 % Another way to calculate the error, on the training dataset or on any other dataset
0051 err = sum(outs ~= y) / numInstances;
0052 fprintf('\tLearning Error %f\n', err);
0053 
0054 % Compare the predicted and target outputs for the first 5 instances
0055 fprintf('\t[predicted outputs : correct outputs]\n');
0056 fprintf('\t\t%d\t\t%d\n', [outs(1:5), y(1:5)]');  % pair the columns so each line shows one instance
0057 
0058 %% Stage Details
0059 
0060 fprintf('=============\n');
0061 fprintf('Stage Details\n');
0062 fprintf('=============\n');
0063 
0064 adacl = AdaBooster(SVMClassifier(numClasses));
0065 
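% The fifth argument (true) presumably turns on per-stage detail output
% during training (see the section title above); this is an assumption about
% the learn(...) signature, which is not documented here.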
0066 [adacl, learnErr] = learn(adacl, x, y, 3, true);
0067 fprintf('\tLearning Error %f\n', learnErr);
0068 
0069 %% Display Classifier
0070 
0071 fprintf('==================\n');
0072 fprintf('Display Classifier\n');
0073 fprintf('==================\n');
0074 
0075 fprintf('\tDisplay Classifier Before Learning\n\t===>\n');
0076 adacl = AdaBooster(SVMClassifier(numClasses));
0077 display(adacl);
0078 fprintf('\t<===\n');
0079 
0080 fprintf('\tDisplay Classifier After Learning\n\t===>\n');
0081 adacl = learn(adacl, x, y, 2);
0082 display(adacl);
0083 fprintf('\t<===\n');
0084 
0085 %% Iterations Errors
0086 
0087 fprintf('================\n');
0088 fprintf('Iterations Error\n');
0089 fprintf('================\n');
0090 
0091 adacl = AdaBooster(SVMClassifier(numClasses));
0092 
0093 fprintf('\tTraining Classifier for 10 iterations\n');
0094 [adacl, learnErr, iterationsErrors] = learn(adacl, x, y, 10);
0095 fprintf('\tLearning Error %f\n', learnErr);
0096 
0097 fprintf('\t------------\n');
0098 fprintf('\tPlotting Iteration Errors\n');
0099 plot(1:length(iterationsErrors), iterationsErrors);
0100 ylim([0 1]);
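% Label the iteration-error curve (the axis names below are descriptive
% assumptions based on this section, not part of the original demo):
xlabel('Boosting iteration');
ylabel('Error');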
0101 
0102 %% Add More Boosting Stages
0103 
0104 fprintf('========================\n');
0105 fprintf('Add More Boosting Stages\n');
0106 fprintf('========================\n');
0107 
0108 adacl = AdaBooster(SVMClassifier(numClasses));
0109 
0110 fprintf('\tTraining Classifier for 5 iterations\n');
0111 adacl = learn(adacl, x, y, 5);
0112 fprintf('\t------------\n');
0113 fprintf('\tTraining Classifier for 5 more iterations\n');
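% Presumably the iteration count passed to learn(...) is the total number of
% boosting stages, so requesting 10 here adds 5 stages on top of the 5
% trained above (an assumption based on the surrounding messages).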
0114 [adacl, learnErr] = learn(adacl, x, y, 10);
0115 fprintf('\tLearning Error %f\n', learnErr);
0116 
0117 
0118 %% Using Different Weak Classifier
0119 
0120 fprintf('===============================\n');
0121 fprintf('Using Different Weak Classifier\n');
0122 fprintf('===============================\n');
0123 
0124 adacl = AdaBooster(DecisionTreeClassifier(numClasses));
0125 
0126 % Train the AdaBoost classifier
0127 fprintf('\tTraining Classifier for 3 iterations\n');
0128 [adacl, learnErr] = learn(adacl, x, y, 3);
0129 fprintf('\tLearning Error %f\n', learnErr);
0130 
0131 %% Add Boost Stages till reaching a required Err Bound
0132 
0133 fprintf('===================================================\n');
0134 fprintf('Add Boost Stages till reaching a required Err Bound\n');
0135 fprintf('===================================================\n');
0136 
0137 adacl = AdaBooster(SVMClassifier(numClasses));
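% Presumably Inf removes the fixed cap on the number of boosting stages and
% the final argument (0.02) is the required error bound at which boosting
% stops (per the section title). The meaning of the '', '' and NaN arguments
% is an assumption about the full learn(...) signature, which is not
% documented here.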
0138 [adacl, learnErr] = learn(adacl, x, y, Inf, true, '', '', NaN, 0.02);
0139 fprintf('\tLearning Error %f\n', learnErr);
