@article{Hashemi_Karimi_2018,
  author   = {Hashemi, Mahdi and Karimi, Hassan A.},
  title    = {Weighted Machine Learning},
  journal  = {Statistics, Optimization \& Information Computing},
  volume   = {6},
  number   = {4},
  pages    = {497--525},
  year     = {2018},
  month    = nov,
  doi      = {10.19139/soic.v6i4.479},
  url      = {http://iapress.org/index.php/soic/article/view/20181202},
  abstract = {Sometimes not all training samples are equal in supervised machine learning. This might happen in different applications because some training samples are measured by more accurate devices, training samples come from different sources with different reliabilities, there is more confidence on some training samples than others, some training samples are more relevant than others, or for any other reason the user wants to put more emphasis on some training samples. Non-weighted machine learning techniques are designed for equally important training samples: (a) the cost of misclassification is equal for training samples in parametric classification techniques, (b) residuals are equally important in parametric regression models, and (c) when voting in non-parametric classification and regression models, training samples either have equal weights or their weights are determined internally by kernels in the feature space, thus no external weights. Weighted least squares model is an example of a weighted machine learning technique which takes the training samples' weights into account. In this work, we develop the weighted versions of Bayesian predictor, perceptron, multilayer perceptron, SVM, and decision tree and show how their results would be different from their non-weighted versions.},
}