@article{fdi:010086446,
  author   = {Clare, Mariana C. A. and Sonnewald, Maike and Lguensat, Redouane
              and Deshayes, Julie and Balaji, V.},
  title    = {Explainable Artificial Intelligence for {Bayesian} Neural
              Networks: Toward Trustworthy Predictions of Ocean Dynamics},
  journal  = {Journal of Advances in Modeling Earth Systems},
  volume   = {14},
  number   = {11},
  pages    = {e2022MS003162},
  year     = {2022},
  doi      = {10.1029/2022MS003162},
  url      = {https://www.documentation.ird.fr/hor/fdi:010086446},
  language = {english},
  keywords = {deep learning; changing climate; dynamical ocean regimes;
              Bayesian; neural networks; explainable AI; interpretability},
  abstract = {The trustworthiness of neural networks is often challenged
              because they lack the ability to express uncertainty and explain
              their skill. This can be problematic given the increasing use of
              neural networks in high stakes decision-making such as in climate
              change applications. We address both issues by successfully
              implementing a Bayesian Neural Network (BNN), where parameters
              are distributions rather than deterministic, and applying novel
              implementations of explainable AI (XAI) techniques. The
              uncertainty analysis from the BNN provides a comprehensive
              overview of the prediction more suited to practitioners' needs
              than predictions from a classical neural network. Using a BNN
              means we can calculate the entropy (i.e., uncertainty) of the
              predictions and determine if the probability of an outcome is
              statistically significant. To enhance trustworthiness, we also
              spatially apply the two XAI techniques of Layer-wise Relevance
              Propagation (LRP) and SHapley Additive exPlanation (SHAP) values.
              These XAI methods reveal the extent to which the BNN is suitable
              and/or trustworthy. Using two techniques gives a more holistic
              view of BNN skill and its uncertainty, as LRP considers neural
              network parameters, whereas SHAP considers changes to outputs. We
              verify these techniques using comparison with intuition from
              physical theory. The differences in explanation identify
              potential areas where new physical theory guided studies are
              needed.},
}