% Source: www.pudn.com > SVM_Train.rar > KPCA_SVM_Train.m, change: 2008-03-11, size: 4130b

function rate = KPCA_SVM
%KPCA_SVM  Kernel Principal Component Analysis & Support Vector Machine.
%
%   Performs face recognition on the ORL database, using KPCA as the
%   feature extractor and a one-vs-rest SVM bank as the classifier.
%
%   Output
%     rate   : recognition rate on the test set
%
%   Data (loaded from .mat files in the current path)
%     S_train : N x d matrix of training inputs (N points, dimension d)
%     S_test  : N x d matrix of test inputs
%
%   NOTE(review): assumes the ORL layout — 40 subjects, 5 training and
%   5 test images per subject, both sets ordered by subject. Confirm
%   against the contents of S_train / S_test.

% parameters
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
load S_train
Tr   = S_train;
Tr_n = size(Tr, 1);
d      = 1;     % polynomial degree for the KPCA kernel
max_ev = 120;   % number of leading eigenvectors (features) kept

% test set
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
load S_test
Te   = S_test;
Te_n = size(Te, 1);

tic     % set clock for training

% carry out Kernel PCA
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% compute the kernel matrix of the training set
K_train = feval('poly_kernel', Tr, Tr, d);
l = ones(Tr_n, Tr_n)/Tr_n;
% centering in feature space
K_train = K_train - l*K_train - K_train*l + l*K_train*l;

% compute eigenvectors and eigenvalues
[evecs, evals] = eig(K_train/Tr_n);
[evals, ind]   = sort(diag(evals));   % diagonalize and sort ascending
Lambda = evals(end:-1:1);             % eigenvalues, descending
Ind    = ind(end:-1:1);

% normalize eigenvector expansion coefficients
Alpha = zeros(Tr_n, max_ev);          % preallocate (was grown in-loop)
for i = 1:max_ev
    Alpha(:, i) = evecs(:, Ind(i)) / sqrt(Lambda(i)/Tr_n);
end
Train_Features = K_train * Alpha(:, 1:max_ev);  % features of training set

% compute the kernel matrix of the test set, centered in feature space
K_test = feval('poly_kernel', Te, Tr, d);
ll = ones(Te_n, Tr_n)/Tr_n;
K_test = K_test - ll*K_train - K_test*l + ll*K_train*l;
Test_Features = K_test * Alpha(:, 1:max_ev);    % extract features

% carry out SVM training (one classifier per subject, one-vs-rest)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
n_class   = 40;   % number of ORL subjects (was scattered as literals)
per_class = 5;    % training samples per subject
Y     = -ones(Tr_n, n_class);         % preallocate targets
alpha = zeros(Tr_n, n_class);         % preallocate multipliers
B     = zeros(1, n_class);            % preallocate biases
for i = 1:n_class
    % construct training targets: +1 for subject i, -1 for the rest
    Y(per_class*i-(per_class-1):per_class*i, i) = 1;
    % compute coefficients by solving the quadratic program with C = 1e-8
    [alpha(:, i), B(:, i)] = lsvctrain(Train_Features, Y(:, i), 1e-8);
end
toc     % stop watch for training

tic     % set watch for classification
% kernel between test and training features; NOTE this is a degree-2
% polynomial kernel, not linear as the original comment claimed
d = 2;
K_test_svm = feval('poly_kernel', Test_Features, Train_Features, d);
% SVM decision values: f(x) = sum_j alpha_j * y_j * K(x, x_j) + b.
% BUGFIX: the original `K_test_svm*alpha .* Y` parses as (K*alpha).*Y,
% which only type-checks because Te_n == Tr_n and element-multiplies test
% decision values by TRAINING labels (label leakage). The standard form
% folds the labels into the multipliers before the kernel product.
% (Assumes lsvctrain returns raw Lagrange multipliers — confirm.)
t = K_test_svm*(alpha .* Y) + repmat(B, Te_n, 1);
% classify with the decision function
C = sign(t);

% recognize patterns from the classifier votes
Pattern = zeros(Te_n, 1);
len     = zeros(Te_n, 1);             % preallocate (was grown in-loop)
for i = 1:Te_n
    Re = find(C(i, :) > 0);           % classifiers voting '+1'
    len(i) = length(Re);
    if len(i) == 1
        % exactly one vote: pattern is determined
        Pattern(i) = Re;
    elseif len(i) > 1
        % several votes: build the indices of the candidate classes'
        % training samples, then pick the class whose sample lies
        % closest to the test point in feature space
        I = repmat((per_class-1:-1:0)', 1, size(Re, 2));
        R = per_class*repmat(Re, per_class, 1);
        R = reshape(R - I, 1, numel(R));
        % measure the distances between test point and training samples
        Distance = dist(Train_Features(R, :), Test_Features(i, :)');
        % pick the index of the training sample with minimum distance
        [v, D] = min(Distance);
        Pattern(i) = Re(ceil(D(1)/per_class));
    else
        % no votes: treat the pattern as a new (unknown) subject
        Pattern(i) = n_class + 1;     % was hard-coded 41
    end
end
toc     % stop watch for classification

% recognition rate (test set assumed ordered: per_class images / subject)
correct = 0;                          % was `sum`, shadowing built-in SUM
for i = 0:n_class-1
    for j = 1:per_class
        if (i+1 == Pattern(per_class*i + j))
            correct = correct + 1;
        end
    end
end
rate = correct / Te_n;                % was hard-coded /200

% kernel functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function v = poly_kernel(x, y, d)
%POLY_KERNEL  Inhomogeneous polynomial kernel: (x*y' + 1).^d.
v = (x*y' + 1).^d;