Jeff Z. Pan, Simon Razniewski, Jan-Christoph Kalo, Sneha Singhania, Jiaoyan Chen, Stefan Dietze, Hajira Jabeen, Janna Omeliyanenko, Wen Zhang, Matteo Lissandrini, Russa Biswas, Gerard de Melo, Angela Bonifati, Edlira Vakaj, Mauro Dragoni, Damien Graux
Creative Commons Attribution 4.0 International license
Large Language Models (LLMs) have taken Knowledge Representation - and the world - by storm. This inflection point marks a shift from explicit knowledge representation to a renewed focus on the hybrid representation of both explicit knowledge and parametric knowledge. In this position paper, we discuss some of the common debate points within the community on LLMs (parametric knowledge) and Knowledge Graphs (explicit knowledge), and speculate on the opportunities and visions that this renewed focus brings, as well as related research topics and challenges.
@Article{pan_et_al:TGDK.1.1.2,
author = {Pan, Jeff Z. and Razniewski, Simon and Kalo, Jan-Christoph and Singhania, Sneha and Chen, Jiaoyan and Dietze, Stefan and Jabeen, Hajira and Omeliyanenko, Janna and Zhang, Wen and Lissandrini, Matteo and Biswas, Russa and de Melo, Gerard and Bonifati, Angela and Vakaj, Edlira and Dragoni, Mauro and Graux, Damien},
title = {{Large Language Models and Knowledge Graphs: Opportunities and Challenges}},
journal = {Transactions on Graph Data and Knowledge},
pages = {2:1--2:38},
year = {2023},
volume = {1},
number = {1},
publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
address = {Dagstuhl, Germany},
URL = {https://drops.dagstuhl.de/entities/document/10.4230/TGDK.1.1.2},
URN = {urn:nbn:de:0030-drops-194766},
doi = {10.4230/TGDK.1.1.2},
annote = {Keywords: Large Language Models, Pre-trained Language Models, Knowledge Graphs, Ontology, Retrieval Augmented Language Models}
}