@inproceedings{e5009392e4034ee38adb9bdb5fadf6dd,
  title         = {Statistical Theory of Overtraining -- Is Cross-Validation Asymptotically Effective?},
  author        = {Amari, S. and Murata, N. and M{\"u}ller, K. R. and Finke, M. and Yang, H.},
  editor        = {Touretzky, D. and Mozer, M. C. and Hasselmo, M.},
  booktitle     = {Advances in Neural Information Processing Systems 8, NIPS 1995},
  series        = {Advances in Neural Information Processing Systems},
  publisher     = {Neural information processing systems foundation},
  pages         = {176--182},
  year          = {1995},
  language      = {English},
  address       = {United States},
  abstract      = {A statistical theory for overtraining is proposed. The analysis treats realizable stochastic neural networks, trained with Kullback-Leibler loss in the asymptotic case. It is shown that the asymptotic gain in the generalization error is small if we perform early stopping, even if we have access to the optimal stopping time. Considering cross-validation stopping we answer the question: In what ratio the examples should be divided into training and testing sets in order to obtain the optimum performance. In the non-asymptotic region cross-validated early stopping always decreases the generalization error. Our large scale simulations done on a CM5 are in nice agreement with our analytical findings.},
  note          = {Publisher Copyright: {\textcopyright} 1995 Neural information processing systems foundation. All rights reserved.; 8th Advances in Neural Information Processing Systems, NIPS 1995 ; Conference date: 27-11-1995 Through 30-11-1995},
  internal-note = {NOTE(review): `address` was exported as the country; convention expects the publisher's city -- confirm and refine. `language`/`address` translated from Japanese export artifacts; author field repaired from escaped-brace corruption around Mueller's initials.},
}