@InProceedings{10.1007/978-3-031-87327-0_13,
author="Tousside, Basile
and Frochte, J{\"o}rg
and Meisen, Tobias",
editor="Rocha, Ana Paula
and Steels, Luc
and van den Herik, Jaap",
title="Dynamic Capacity Expansion in Continual Learning: The eSECL Approach",
booktitle="Agents and Artificial Intelligence",
year="2025",
publisher="Springer Nature Switzerland",
address="Cham",
pages="272--291",
abstract="Humans excel at adapting to constantly changing environments, while artificial neural networks (ANNs) struggle with forgetting under dynamic conditions. Continual learning (CL) research aims to address this issue by enabling neural networks to sequentially acquire new knowledge, balancing stability (retaining past tasks) and plasticity (adapting to new tasks). Recently, promising work has emerged in this area. Notably, Sparsification and Expansion for CL (SECL) introduces a robust framework that sparsely uses a neural network's available capacity and expands it when this capacity is exhausted. We propose an enhanced version of SECL, dubbed eSECL, which addresses two key weaknesses of the original SECL algorithm. First, SECL was primarily investigated using convolutional neural networks (CNNs). Second, it does not properly address how much capacity to add when growing a network. We address the first issue by generalizing the algorithm to both dense neural networks and CNNs. For the second issue, we introduce a robust heuristic that monitors the model capacity at each layer and determines the necessary capacity to add. Experiments on popular CL datasets demonstrate the superiority of eSECL over state-of-the-art methods.",
isbn="978-3-031-87327-0"
}