@article{Pourmand_Shabbak_Ganjali_2021,
  title        = {Feature Selection Based on Divergence Functions: A Comparative Classification Study},
  author       = {Pourmand, Saeid and Shabbak, Ashkan and Ganjali, Mojtaba},
  journal      = {Statistics, Optimization \& Information Computing},
  volume       = {9},
  number       = {3},
  pages        = {587--606},
  year         = {2021},
  month        = jul,
  url          = {http://iapress.org/index.php/soic/article/view/1092},
  doi          = {10.19139/soic-2310-5070-1092},
  abstractNote = {Due to the extensive use of high-dimensional data and its application in a wide range of scientific fields of research, dimensionality reduction has become a major part of the preprocessing step in machine learning. Feature selection is one procedure for reducing dimensionality. In this process, instead of using the whole set of features, a subset is selected to be used in the learning model. Feature selection (FS) methods are divided into three main categories: filters, wrappers, and embedded approaches. Filter methods depend only on the characteristics of the data and do not rely on the learning model at hand. Divergence functions, as measures of the differences between probability distribution functions, can be used as filter methods of feature selection. In this paper, the performances of a few divergence functions, such as Jensen-Shannon (JS) divergence and Exponential divergence (EXP), are compared with those of some of the most-known filter feature selection methods, such as Information Gain (IG) and Chi-Squared (CHI). This comparison was made through the accuracy rate and F1-score of classification models after implementing these feature selection methods.},
}