@article{pégny2020the,
  author   = {Maël Pégny and Eva Thelisson and Issam Ibnouhsein},
  title    = {The Right to an Explanation},
  journal  = {Delphi - Interdisciplinary Review of Emerging Technologies},
  volume   = {2},
  number   = {4},
  year     = {2020},
  abstract = {The opacity of some recent Machine Learning (ML) techniques has raised fundamental questions about their explainability and prompted the creation of a research subdomain, Explainable Artificial Intelligence (XAI). Opacity would be particularly problematic if those methods were used in the context of administrative decision-making, since most democratic countries grant their citizens a right to receive an explanation of the decisions affecting them. If this demand for explanation were not satisfied, the very use of AI methods in such contexts might be called into question. In this paper, we discuss and defend the relevance of an ideal right to an explanation. It is essential for both the efficiency and accountability of decision procedures, whether in public administration or in private entities controlling access to essential social goods. We answer several objections against this right, which claim that it would be at best inefficient in practice or at worst play the role of a legal smokescreen. While those worst-case scenarios are certainly within the realm of possibility, they are by no means an essential vice of the right to an explanation. This right should not be dismissed, but defended and further studied to increase its practical relevance.},
  url      = {https://doi.org/10.21552/delphi/2019/4/5},
  doi      = {10.21552/delphi/2019/4/5}
}