@inproceedings{melo12,
  author    = {Melo, Francisco S. and Spaan, Matthijs and Witwicki, Stefan},
  title     = {Exploiting Sparse Interactions for Optimizing Communication in {Dec-MDPs}},
  booktitle = {Proceedings of the Workshop on Multiagent Sequential Decision Making Under Uncertainty ({MSDM}-2012)},
  address   = {Valencia, Spain},
  month     = jun,
  year      = {2012},
  pages     = {40--47},
  keywords  = {Coordination, Planning, Communication in Dec-MDPs, Sparse Interactions, QueryPOMDP},
  abstract  = {Decentralized partially observable Markov decision processes (Dec-POMDPs) provide powerful modeling tools for multiagent decision-making in the face of uncertainty, but solving these models comes at a very high computational cost. Two avenues for side-stepping the computational burden can be identified: structured interactions between agents and intra-agent communication. In this paper, we focus on the interplay between these concepts, namely how sparse interactions reflect in the communication needs. A key insight is that in domains with local interactions the amount of communication necessary for successful joint behavior can be heavily reduced, due to the limited influence between agents. We exploit this insight by deriving local POMDP models that optimize each agent's communication behavior. Our experimental results show that our approach successfully exploits sparse interactions: we can effectively identify the situations in which it is beneficial to communicate, as well as trade off the cost of communication with overall task performance.},
}