@inproceedings{piater2024medical,
  author       = {Piater, Tristan and Penzel, Niklas and Stein, Gideon and Denzler, Joachim},
  title        = {When Medical Imaging Met Self-Attention: A Love Story That Didn't Quite Work Out},
  booktitle    = {International Conference on Computer Vision Theory and Applications ({VISAPP})},
  year         = {2024},
  pages        = {149--158},
  organization = {INSTICC},
  publisher    = {SciTePress},
  doi          = {10.5220/0012382600003660},
  url          = {https://www.scitepress.org/PublicationsDetail.aspx?ID=mLETKq/KgdA=&t=1},
  isbn         = {978-989-758-679-8},
  issn         = {2184-4321},
  groups       = {biomedical},
  abstract     = {A substantial body of research has focused on developing systems that assist medical professionals during labor-intensive early screening processes, many based on convolutional deep-learning architectures. Recently, multiple studies explored the application of so-called self-attention mechanisms in the vision domain. These studies often report empirical improvements over fully convolutional approaches on various datasets and tasks. To evaluate this trend for medical imaging, we extend two widely adopted convolutional architectures with different self-attention variants on two different medical datasets. With this, we aim to specifically evaluate the possible advantages of additional self-attention. We compare our models with similarly sized convolutional and attention-based baselines and evaluate performance gains statistically. Additionally, we investigate how including such layers changes the features learned by these models during the training. Following a hyperparameter search, and contrary to our expectations, we observe no significant improvement in balanced accuracy over fully convolutional models. We also find that important features, such as dermoscopic structures in skin lesion images, are still not learned by employing self-attention. Finally, analyzing local explanations, we confirm biased feature usage. We conclude that merely incorporating attention is insufficient to surpass the performance of existing fully convolutional methods.},
}