@inproceedings{mfernandezdelgadojribeiro2010aparallel,
  title     = {A Parallel Perceptron network for classification with direct calculation of the weights optimizing error and margin},
  booktitle = {IEEE International Joint Conference on Neural Networks 2010},
  year      = {2010},
  abstract  = {The Parallel Perceptron (PP) is a simple neural network which has been shown to be a universal approximator, and it can be trained using the Parallel Delta (P-Delta) rule. This rule tries to maximize the distance between the perceptron activations and their decision hyperplanes in order to increase its generalization ability, following the principles of Statistical Learning Theory. In this paper we propose a closed-form analytical expression to calculate, without iterations, the PP weights for classification tasks. The calculated weights globally optimize a cost function which simultaneously takes into account the training error and the perceptron margin, similarly to the P-Delta rule. Our approach, called the Direct Parallel Perceptron (DPP), has linear computational complexity in the number of inputs, making it very attractive for high-dimensional problems. DPP is competitive with SVM and other approaches (including P-Delta) on two-class classification problems but, unlike most of them, its results are not very sensitive to its tunable parameters. Moreover, the absence of an iterative training stage gives DPP the ability to learn on-line.},
  doi       = {10.1109/IJCNN.2010.5596941},
  url       = {http://dx.doi.org/10.1109/IJCNN.2010.5596941},
  author    = {M. Fern\'{a}ndez-Delgado and J. Ribeiro and E. Cernadas and S. Barro}
}