Files
COBOL-to-JavaTX/paper/bibliographie.bib
2025-09-01 15:28:16 +02:00

47 lines
3.3 KiB
BibTeX

@book{example.book,
  author    = {Mitchell, Tom Michael},
  title     = {Machine Learning},
  year      = {1997},
  edition   = {International},
  publisher = {McGraw-Hill},
  address   = {New York},
  series    = {McGraw-Hill international editions. Computer science series},
  isbn      = {0071154671},
  keywords  = {Buch;Entscheidungsb{\"a}ume}
}
@article{example.article,
  author   = {Quinlan, J. R.},
  title    = {Induction of Decision Trees},
  journal  = {Machine Learning},
  year     = {1986},
  volume   = {1},
  number   = {1},
  pages    = {81--106},
  doi      = {10.1007/BF00116251},
  url      = {https://link.springer.com/article/10.1007/BF00116251},
  issn     = {1573-0565},
  keywords = {Entscheidungsb{\"a}ume},
  abstract = {The technology for building knowledge-based systems by inductive inference from examples has been demonstrated successfully in several practical applications. This paper summarizes an approach to synthesizing decision trees that has been used in a variety of systems, and it describes one such system, ID3, in detail. Results from recent studies show ways in which the methodology can be modified to deal with information that is noisy and/or incomplete. A reported shortcoming of the basic algorithm is discussed and two means of overcoming it are compared. The paper concludes with illustrations of current research directions.}
}
@inproceedings{example.inproceeding,
  author    = {Mahalingam, P. and Kalpana, D. and Thyagarajan, T.},
  title     = {Overfit Analysis on Decision Tree Classifier for Fault Classification in {DAMADICS}},
  booktitle = {2021 {IEEE} Madras Section Conference ({MASCON})},
  year      = {2021},
  pages     = {1--4},
  doi       = {10.1109/MASCON51689.2021.9563557},
  keywords  = {Entscheidungsb{\"a}ume;Paper},
  abstract  = {In this paper, the effect of overfitting displayed by a decision tree classifier model is studied and the method of resampling technique to eliminate the overfitting is implemented in the pre-pruning stage of the algorithm. The classifier is built for fault classification function subjected to a synthetically generated dataset of the benchmark DAMADICS process which represents a pneumatic actuator system. The overfitting problem for both multiclass classification and binary class classification using maximum depth as the optimized hyper parameter is analyzed. The results before and after eradicating the overfit are tabulated. The performance of the model is plotted between hyper parameter chosen and the testing, training accuracy. The best fit tree model is also graphically visualized.}
}
@misc{example.misc,
  author   = {Fortmann-Roe, Scott},
  title    = {Understanding the Bias-Variance Tradeoff},
  year     = {2012},
  month    = jun,
  url      = {https://scott.fortmann-roe.com/docs/BiasVariance.html},
  urldate  = {2023-07-13},
  abstract = {When we discuss prediction models, prediction errors can be decomposed into two main subcomponents we care about: error due to bias and error due to variance. There is a tradeoff between a model's ability to minimize bias and variance. Understanding these two types of error can help us diagnose model results and avoid the mistake of over- or under-fitting.}
}