@inproceedings{12693,
	title = {Classification Probability Analysis of Principal Component Null Space Analysis},
	booktitle = {Proceedings of the 17th International Conference on Pattern Recognition ({ICPR} 2004)},
	volume = {1},
	year = {2004},
	month = aug,
	pages = {240--243},
	abstract = {In a previous paper, we have presented a new linear classification algorithm, principal component null space analysis (PCNSA), which is designed for problems like object recognition where different classes have unequal and non-white noise covariance matrices. PCNSA first obtains a principal components space (PCA space) for the entire data and in this PCA space, it finds for each class "i", an {$M_i$}-dimensional subspace along which the class's intra-class variance is the smallest. We call this subspace an approximate null space (ANS) since the lowest variance is usually "much smaller" than the highest. A query is classified into class "i" if its distance from the class's mean in the class's ANS is a minimum. In this paper, we discuss the PCNSA algorithm more precisely and derive tight upper bounds on its classification error probability. We use these expressions to compare classification performance of PCNSA with that of subspace linear discriminant analysis (SLDA).},
	keywords = {principal component analysis; approximate null space; classification error probability; covariance matrices; intra-class variance; linear discriminant analysis; object recognition; pattern classification; subspace methods},
	doi = {10.1109/ICPR.2004.1334068},
	author = {Vaswani, Namrata and Chellappa, Rama}
}