Graph Neural Networks (GNNs) are a family of neural architectures naturally suited to learning functions on graphs, and they are now used in a wide range of applications. It has been observed that GNNs share many similarities with classical computer science (CS) formalisms such as the Weisfeiler-Leman graph isomorphism test, bisimulation, and logic; most notably, both GNNs and these formalisms deal with functions on graphs and graph-like structures. This observation opens up the opportunity to compare GNN architectures with these formalisms in terms of various kinds of expressivity, thus positioning these architectures within the well-established landscape of theoretical CS. This, in turn, helps us better understand the fundamental capabilities and limitations of various GNN architectures, enabling more informed choices about which architecture to use, if any at all. In these lecture notes, I give an introduction to the state-of-the-art foundations of GNNs, specifically to our current understanding of their expressivity in terms of the classical formalisms, considering several notions of expressive power.
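As a point of reference for the comparison mentioned in the abstract, the following is a minimal illustrative sketch (not taken from the lecture notes) of 1-dimensional Weisfeiler-Leman colour refinement, the classical procedure whose distinguishing power is most often compared with that of message-passing GNNs. All names (`wl_refinement`, `adj`, `init_colors`) and the encoding of graphs as adjacency lists are assumptions made for illustration only.

```python
# Illustrative sketch: 1-WL colour refinement on a graph given as an
# adjacency list. Each refinement round has the same "aggregate the
# multiset of neighbour colours, then combine with the node's own colour"
# shape as a message-passing GNN layer.

from collections import Counter


def wl_refinement(adj, init_colors, rounds=None):
    """Refine node colours until the colour partition stabilises.

    adj         : dict mapping each node to a list of its neighbours
    init_colors : dict mapping each node to an initial (hashable) colour
    rounds      : optional cap on the number of refinement rounds
    """
    colors = dict(init_colors)
    max_rounds = rounds if rounds is not None else len(adj)
    for _ in range(max_rounds):
        # New colour = hash of (own old colour, multiset of neighbour colours).
        new_colors = {
            v: hash((colors[v],
                     frozenset(Counter(colors[u] for u in adj[v]).items())))
            for v in adj
        }
        if len(set(new_colors.values())) == len(set(colors.values())):
            break  # partition did not get finer: stable colouring reached
        colors = new_colors
    return colors


if __name__ == "__main__":
    # A 6-cycle vs two disjoint triangles: 1-WL (and hence any standard
    # message-passing GNN) cannot tell these apart when all nodes start
    # with the same colour.
    cycle6 = {i: [(i - 1) % 6, (i + 1) % 6] for i in range(6)}
    two_triangles = {0: [1, 2], 1: [0, 2], 2: [0, 1],
                     3: [4, 5], 4: [3, 5], 5: [3, 4]}
    c1 = wl_refinement(cycle6, {v: 0 for v in cycle6})
    c2 = wl_refinement(two_triangles, {v: 0 for v in two_triangles})
    same_histogram = (sorted(Counter(c1.values()).values())
                      == sorted(Counter(c2.values()).values()))
    print(same_histogram)  # True: the two graphs are not distinguished
```

The example is only meant to make the analogy concrete: the per-round update mirrors the aggregate-and-combine step of a GNN layer, which is why 1-WL upper-bounds the distinguishing power of standard message-passing architectures.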
@InProceedings{kostylev:OASIcs.RW.2024/2025.3,
author = {Kostylev, Egor V.},
title = {{Foundations of Graph Neural Networks (A Logician's View)}},
booktitle = {Joint Proceedings of the 20th and 21st Reasoning Web Summer Schools (RW 2024 \& RW 2025)},
pages = {3:1--3:19},
series = {Open Access Series in Informatics (OASIcs)},
ISBN = {978-3-95977-405-5},
ISSN = {2190-6807},
year = {2025},
volume = {138},
editor = {Artale, Alessandro and Bienvenu, Meghyn and Garc{\'\i}a, Yazm{\'\i}n Ib\'{a}\~{n}ez and Murlak, Filip},
publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
address = {Dagstuhl, Germany},
URL = {https://drops.dagstuhl.de/entities/document/10.4230/OASIcs.RW.2024/2025.3},
URN = {urn:nbn:de:0030-drops-250486},
doi = {10.4230/OASIcs.RW.2024/2025.3},
annote = {Keywords: Graph Neural Networks, Expressivity, Logic}
}