Modern recommender systems leverage large-scale retrieval models consisting
of two stages: training a dual-encoder model to embed queries and candidates in
the same space, followed by an Approximate Nearest Neighbor (ANN) search to
select top candidates given a query's embedding. In this paper, we propose a
new single-stage paradigm: a generative retrieval model which autoregressively
decodes the identifiers for the target candidates in one phase. To do this,
instead of assigning randomly generated atomic IDs to each item, we generate
Semantic IDs: a semantically meaningful tuple of codewords for each item that
serves as its unique identifier. We use a hierarchical method called RQ-VAE to
generate these codewords. Once we have the Semantic IDs for all the items, a
Transformer-based sequence-to-sequence model is trained to predict the Semantic
ID of the next item. Since this model predicts the tuple of codewords
identifying the next item directly in an autoregressive manner, it can be
considered a generative retrieval model. We show that our recommender system
trained in this new paradigm improves the results achieved by current SOTA
models on the Amazon dataset. Moreover, we demonstrate that the
sequence-to-sequence model coupled with hierarchical Semantic IDs offers better
generalization and hence improves retrieval of cold-start items for
recommendations.
%0 Generic
%1 rajput2023recommender
%A Rajput, Shashank
%A Mehta, Nikhil
%A Singh, Anima
%A Keshavan, Raghunandan H.
%A Vu, Trung
%A Heldt, Lukasz
%A Hong, Lichan
%A Tay, Yi
%A Tran, Vinh Q.
%A Samost, Jonah
%A Kula, Maciej
%A Chi, Ed H.
%A Sathiamoorthy, Maheswaran
%D 2023
%K llm retrieval
%T Recommender Systems with Generative Retrieval
%U http://arxiv.org/abs/2305.05065
%X Modern recommender systems leverage large-scale retrieval models consisting
of two stages: training a dual-encoder model to embed queries and candidates in
the same space, followed by an Approximate Nearest Neighbor (ANN) search to
select top candidates given a query's embedding. In this paper, we propose a
new single-stage paradigm: a generative retrieval model which autoregressively
decodes the identifiers for the target candidates in one phase. To do this,
instead of assigning randomly generated atomic IDs to each item, we generate
Semantic IDs: a semantically meaningful tuple of codewords for each item that
serves as its unique identifier. We use a hierarchical method called RQ-VAE to
generate these codewords. Once we have the Semantic IDs for all the items, a
Transformer-based sequence-to-sequence model is trained to predict the Semantic
ID of the next item. Since this model predicts the tuple of codewords
identifying the next item directly in an autoregressive manner, it can be
considered a generative retrieval model. We show that our recommender system
trained in this new paradigm improves the results achieved by current SOTA
models on the Amazon dataset. Moreover, we demonstrate that the
sequence-to-sequence model coupled with hierarchical Semantic IDs offers better
generalization and hence improves retrieval of cold-start items for
recommendations.
@misc{rajput2023recommender,
  author        = {Rajput, Shashank and Mehta, Nikhil and Singh, Anima and Keshavan, Raghunandan H. and Vu, Trung and Heldt, Lukasz and Hong, Lichan and Tay, Yi and Tran, Vinh Q. and Samost, Jonah and Kula, Maciej and Chi, Ed H. and Sathiamoorthy, Maheswaran},
  title         = {Recommender Systems with Generative Retrieval},
  year          = {2023},
  eprint        = {2305.05065},
  archiveprefix = {arXiv},
  primaryclass  = {cs.IR},
  url           = {http://arxiv.org/abs/2305.05065},
  keywords      = {llm retrieval},
  abstract      = {Modern recommender systems leverage large-scale retrieval models consisting
of two stages: training a dual-encoder model to embed queries and candidates in
the same space, followed by an Approximate Nearest Neighbor (ANN) search to
select top candidates given a query's embedding. In this paper, we propose a
new single-stage paradigm: a generative retrieval model which autoregressively
decodes the identifiers for the target candidates in one phase. To do this,
instead of assigning randomly generated atomic IDs to each item, we generate
Semantic IDs: a semantically meaningful tuple of codewords for each item that
serves as its unique identifier. We use a hierarchical method called RQ-VAE to
generate these codewords. Once we have the Semantic IDs for all the items, a
Transformer-based sequence-to-sequence model is trained to predict the Semantic
ID of the next item. Since this model predicts the tuple of codewords
identifying the next item directly in an autoregressive manner, it can be
considered a generative retrieval model. We show that our recommender system
trained in this new paradigm improves the results achieved by current SOTA
models on the Amazon dataset. Moreover, we demonstrate that the
sequence-to-sequence model coupled with hierarchical Semantic IDs offers better
generalization and hence improves retrieval of cold-start items for
recommendations.},
  added-at      = {2023-08-17T15:01:11.000+0200},
  timestamp     = {2023-08-17T15:01:11.000+0200},
  biburl        = {https://www.bibsonomy.org/bibtex/2910ede02c32cd9de74015b453431ae24/lisa-ee},
  description   = {Recommender Systems with Generative Retrieval},
  interhash     = {03a4a4a0ec7666e67faaff6dbb5e521f},
  intrahash     = {910ede02c32cd9de74015b453431ae24},
}