@mastersthesis{Demus2023,
  type     = {Master's Thesis},
  author   = {Demus, Christoph},
  title    = {Analysis of Continuous Learning Strategies at the Example of Replay-Based Text Classification},
  school   = {Angewandte Computer- und Biowissenschaften},
  pages    = {75},
  year     = {2023},
  abstract = {Continuous learning is a research field that has significantly boosted in recent years due to highly complex machine and deep learning models. Whereas static models need to be retrained entirely from scratch when new data get available, continuous models progressively adapt to new data saving computational resources. In this context, this work analyzes parameters impacting replay-based continuous learning approaches at the example of a data-incremental text classification task using an MLP and LSTM. Generally, it was found that replay improves the results compared to naive approaches but achieves not the performance of a static model. Mainly, the performances increased with more replayed examples, and the number of training iterations has a significant influence as it can partly control the stability-plasticity-trade-off. In contrast, the impact of balancing the buffer and the strategy to select examples to store in the replay buffer were found to have a minor impact on the results in the present case.},
  subject  = {Maschinelles Lernen},
  language = {en},
}