@inproceedings{ec5926811a5f43319e73f983dd41689f,
title = "Information-theoretic analysis of epistemic uncertainty in Bayesian meta-learning",
abstract = "The overall predictive uncertainty of a trained predictor can be decomposed into separate contributions due to epistemic and aleatoric uncertainty. Under a Bayesian formulation, assuming a well-specified model, the two contributions can be exactly expressed (for the log-loss) or bounded (for more general losses) in terms of information-theoretic quantities (Xu and Raginsky [2020]). This paper addresses the study of epistemic uncertainty within an information-theoretic framework in the broader setting of Bayesian meta-learning. A general hierarchical Bayesian model is assumed in which hyperparameters determine the per-task priors of the model parameters. Exact characterizations (for the log-loss) and bounds (for more general losses) are derived for the epistemic uncertainty – quantified by the minimum excess meta-risk (MEMR) – of optimal meta-learning rules. This characterization is leveraged to bring insights into the dependence of the epistemic uncertainty on the number of tasks and on the amount of per-task training data. Experiments are presented that use the proposed information-theoretic bounds, evaluated via neural mutual information estimators, to compare the performance of conventional learning and meta-learning as the number of meta-learning tasks increases.",
author = "Sharu Jose and Sangwoo Park and Osvaldo Simeone",
year = "2022",
month = may,
day = "3",
language = "English",
series = "Proceedings of Machine Learning Research",
publisher = "PMLR",
pages = "9758--9775",
editor = "Gustau Camps-Valls and {Francisco J. R.} Ruiz and Isabel Valera",
booktitle = "Proceedings of the 25th International Conference on Artificial Intelligence and Statistics (AISTATS 2022)",
note = "The 25th International Conference on Artificial Intelligence and Statistics, AISTATS 2022; Conference date: 28-03-2022 through 30-03-2022",
}