This extended abstract discusses various approaches to constraining Partially Observable Markov Decision Processes (POMDPs) with social norms and logical assertions in a dynamic logic framework. While the exploitation of synergies between formal logic on the one hand and stochastic approaches and machine learning on the other has attracted significantly increasing interest in recent years, most of the respective approaches fall into the category of relational learning in the widest sense, including inductive (stochastic) logic programming. In contrast, the use of formal knowledge (including knowledge about social norms) to provide hard constraints and prior knowledge for a stochastic learning or modeling task is approached much less frequently. Although we do not propose directly implementable technical solutions, we hope that this work is a useful contribution to the discussion about the usefulness and feasibility of approaches from norm research and formal logic in the context of stochastic behavioral models, and vice versa.
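To illustrate the general idea of norms acting as hard constraints on a stochastic decision model, the following minimal sketch (not the authors' formalism) prunes the action set of a toy tiger-style POMDP before value-based action selection. All names here (Norm, permitted, q_estimate, the belief values and threshold) are illustrative assumptions, not taken from the abstract or the paper.

```python
# Minimal sketch: behavioral norms as hard constraints that prune the action
# set of a toy POMDP-like problem before (myopic) value-based action selection.
from dataclasses import dataclass
from typing import Callable, Dict, List


@dataclass
class Norm:
    """A prohibition: if `condition` holds for the current belief, `action` is forbidden."""
    condition: Callable[[Dict[str, float]], bool]
    action: str


def permitted(actions: List[str], belief: Dict[str, float], norms: List[Norm]) -> List[str]:
    """Remove every action that some applicable norm forbids (hard constraint)."""
    forbidden = {n.action for n in norms if n.condition(belief)}
    return [a for a in actions if a not in forbidden]


# Toy two-state problem (tiger-style): belief over where the tiger is.
STATES = ["tiger-left", "tiger-right"]
ACTIONS = ["open-left", "open-right", "listen"]


def q_estimate(belief: Dict[str, float], action: str) -> float:
    """Crude myopic value estimate, standing in for a real POMDP solver."""
    if action == "listen":
        return -1.0
    safe_state = "tiger-right" if action == "open-left" else "tiger-left"
    return 10.0 * belief[safe_state] - 100.0 * (1.0 - belief[safe_state])


# Example norm: never open a door while the belief is still too uncertain.
norms = [
    Norm(condition=lambda b: max(b.values()) < 0.9, action="open-left"),
    Norm(condition=lambda b: max(b.values()) < 0.9, action="open-right"),
]

belief = {"tiger-left": 0.6, "tiger-right": 0.4}
allowed = permitted(ACTIONS, belief, norms)
best = max(allowed, key=lambda a: q_estimate(belief, a))
print(f"allowed actions: {allowed}, chosen: {best}")
```

In this sketch the norm is enforced by filtering actions rather than by penalizing them in the reward function, which is one simple reading of "hard constraints" as opposed to soft prior knowledge.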
@InProceedings{nickles_et_al:DagSemProc.09121.25,
  author    = {Nickles, Matthias and Rettinger, Achim},
  title     = {{Partially Observable Markov Decision Processes with Behavioral Norms}},
  booktitle = {Normative Multi-Agent Systems},
  pages     = {1--13},
  series    = {Dagstuhl Seminar Proceedings (DagSemProc)},
  ISSN      = {1862-4405},
  year      = {2009},
  volume    = {9121},
  editor    = {Guido Boella and Pablo Noriega and Gabriella Pigozzi and Harko Verhagen},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/DagSemProc.09121.25},
  URN       = {urn:nbn:de:0030-drops-19134},
  doi       = {10.4230/DagSemProc.09121.25},
  annote    = {Keywords: Norms, Partially Observable Markov Decision Processes, Deontic Logic, Propositional Dynamic Logic}
}