@inproceedings{f7f438196ec24c77b736f793c2f55b89,
title = "Fine-grained generalization analysis of inductive matrix completion",
abstract = "In this paper, we bridge the gap between the state-of-the-art theoretical results for matrix completion with the nuclear norm and their equivalent in \textit{inductive matrix completion}: (1) In the distribution-free setting, we prove bounds improving the previously best scaling of O(rd2) to {\~O}(d3/2√r), where d is the dimension of the side information and r is the rank. (2) We introduce the (smoothed) \textit{adjusted trace-norm minimization} strategy, an inductive analogue of the weighted trace norm, for which we show guarantees of the order {\~O}(dr) under arbitrary sampling. In the inductive case, a similar rate was previously achieved only under uniform sampling and for exact recovery. Both our results align with the state of the art in the particular case of standard (non-inductive) matrix completion, where they are known to be tight up to log terms. Experiments further confirm that our strategy outperforms standard inductive matrix completion on various synthetic datasets and real problems, justifying its place as an important tool in the arsenal of methods for matrix completion using side information. ",
author = "Antoine Ledent and Rodrigo Alves and Yunwen Lei and Marius Kloft",
year = "2021",
month = dec,
day = "1",
language = "English",
isbn = "9781713845393",
series = "Advances in neural information processing systems",
publisher = "NeurIPS",
editor = "M. Ranzato and A. Beygelzimer and P.S. Liang and J.W. Vaughan and Y. Dauphin",
booktitle = "Advances in Neural Information Processing Systems 34 (NeurIPS 2021)",
note = "Thirty-fifth Conference on Neural Information Processing Systems, NeurIPS 2021 ; Conference date: 06-12-2021 Through 14-12-2021",
}
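
For readers of this entry, the sketch below illustrates the standard inductive matrix completion setup the abstract builds on: observed entries of a matrix are modeled as x_i^T A y_j, with side-information vectors x_i, y_j, and the core matrix A is learned under a nuclear-norm (trace-norm) penalty via proximal gradient descent. This is a minimal, hypothetical illustration, not the authors' code; the function names (inductive_mc, svt) and all hyperparameters are assumptions, and the paper's (smoothed) adjusted trace-norm reweighting of the penalty is not implemented here.

# Hypothetical sketch of plain inductive matrix completion with a
# nuclear-norm penalty (not the paper's adjusted trace-norm variant).
import numpy as np

def svt(A, tau):
    # Proximal operator of tau * nuclear norm: soft-threshold singular values.
    U, s, Vt = np.linalg.svd(A, full_matrices=False)
    return U @ np.diag(np.maximum(s - tau, 0.0)) @ Vt

def inductive_mc(X, Y, rows, cols, vals, lam=0.1, lr=0.5, iters=500):
    # Learn a d x d core matrix A such that X @ A @ Y.T matches the
    # observed entries (rows[k], cols[k]) -> vals[k].
    d = X.shape[1]
    A = np.zeros((d, d))
    n_obs = len(vals)
    for _ in range(iters):
        # Residuals of x_i^T A y_j on the observed entries.
        resid = np.einsum('ik,kl,il->i', X[rows], A, Y[cols]) - vals
        # Gradient of the averaged squared loss with respect to A.
        grad = X[rows].T @ (resid[:, None] * Y[cols]) / n_obs
        # Gradient step followed by the nuclear-norm proximal step.
        A = svt(A - lr * grad, lr * lam)
    return A

# Toy usage: rank-2 ground truth with 5-dimensional side information.
rng = np.random.default_rng(0)
n, m, d, r = 60, 50, 5, 2
X, Y = rng.normal(size=(n, d)), rng.normal(size=(m, d))
A_true = rng.normal(size=(d, r)) @ rng.normal(size=(r, d))
M = X @ A_true @ Y.T
rows, cols = rng.integers(0, n, 800), rng.integers(0, m, 800)
A_hat = inductive_mc(X, Y, rows, cols, M[rows, cols])
print(np.linalg.norm(X @ A_hat @ Y.T - M) / np.linalg.norm(M))

The adjusted trace-norm strategy analyzed in the paper differs from this sketch in that the regularizer is reweighted using (smoothed) estimates of the row and column sampling marginals, which is what yields the $\tilde{O}(dr)$ guarantee under arbitrary sampling.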