Ingo Pill, Daniel Jung, Eldin Kurudzija, Anna Sztyber-Betley, Michał Syfert, Kai Dresia, Günther Waxenegger-Wilfing, Johan de Kleer
Creative Commons Attribution 4.0 International license
Fault diagnosis has been addressed in many research communities, leading to a variety of available fault diagnosis techniques. Deciding as a user which fault diagnosis methods are suitable for a specific application is thus a nontrivial task. Benchmarks can provide the community with a holistic understanding of the landscape of newly developed and available fault diagnosis methods when making this decision. After a long hiatus, we revived the DX Competition with three fault diagnosis benchmarks: SLIDe, LUMEN, and LiU-ICE. The purpose of the benchmarks is to inspire fault diagnosis research with challenging problems in cyber-physical systems relevant for industry. The benchmarks share a common code structure and we used similar performance metrics in order to simplify the adaptation of diagnosis system solutions to the different case studies.
@inproceedings{pill_et_al:OASIcs.DX.2025.14,
  author    = {Pill, Ingo and Jung, Daniel and Kurudzija, Eldin and Sztyber-Betley, Anna and Syfert, Micha{\l} and Dresia, Kai and Waxenegger-Wilfing, G{\"u}nther and de Kleer, Johan},
  title     = {The {DX} Competition 2025 and Its Benchmarks},
  booktitle = {36th International Conference on Principles of Diagnosis and Resilient Systems (DX 2025)},
  editor    = {Quinones-Grueiro, Marcos and Biswas, Gautam and Pill, Ingo},
  series    = {Open Access Series in Informatics (OASIcs)},
  volume    = {136},
  pages     = {14:1--14:19},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  year      = {2025},
  isbn      = {978-3-95977-394-2},
  issn      = {2190-6807},
  doi       = {10.4230/OASIcs.DX.2025.14},
  url       = {https://drops.dagstuhl.de/entities/document/10.4230/OASIcs.DX.2025.14},
  urn       = {urn:nbn:de:0030-drops-248030},
  annote    = {Keywords: Diagnosis, Algorithms, Evaluation},
}