The process of reconstructing experiences from human brain activity offers a
unique lens into how the brain interprets and represents the world. In this
paper, we introduce a method for reconstructing music from brain activity,
captured using functional magnetic resonance imaging (fMRI). Our approach uses
either music retrieval or the MusicLM music generation model conditioned on
embeddings derived from fMRI data. The generated music resembles the musical
stimuli that human subjects experienced, with respect to semantic properties
like genre, instrumentation, and mood. We investigate the relationship between
different components of MusicLM and brain activity through a voxel-wise
encoding modeling analysis. Furthermore, we discuss which brain regions
represent information derived from purely textual descriptions of music
stimuli. We provide supplementary material including examples of the
reconstructed music at https://google-research.github.io/seanet/brain2music
Description
Brain2Music: Reconstructing Music from Human Brain Activity
%0 Generic
%1 denk2023brain2music
%A Denk, Timo I.
%A Takagi, Yu
%A Matsuyama, Takuya
%A Agostinelli, Andrea
%A Nakai, Tomoya
%A Frank, Christian
%A Nishimoto, Shinji
%D 2023
%K ai brain music
%T Brain2Music: Reconstructing Music from Human Brain Activity
%U http://arxiv.org/abs/2307.11078
%X The process of reconstructing experiences from human brain activity offers a
unique lens into how the brain interprets and represents the world. In this
paper, we introduce a method for reconstructing music from brain activity,
captured using functional magnetic resonance imaging (fMRI). Our approach uses
either music retrieval or the MusicLM music generation model conditioned on
embeddings derived from fMRI data. The generated music resembles the musical
stimuli that human subjects experienced, with respect to semantic properties
like genre, instrumentation, and mood. We investigate the relationship between
different components of MusicLM and brain activity through a voxel-wise
encoding modeling analysis. Furthermore, we discuss which brain regions
represent information derived from purely textual descriptions of music
stimuli. We provide supplementary material including examples of the
reconstructed music at https://google-research.github.io/seanet/brain2music
@misc{denk2023brain2music,
  abstract      = {The process of reconstructing experiences from human brain activity offers a
unique lens into how the brain interprets and represents the world. In this
paper, we introduce a method for reconstructing music from brain activity,
captured using functional magnetic resonance imaging (fMRI). Our approach uses
either music retrieval or the MusicLM music generation model conditioned on
embeddings derived from fMRI data. The generated music resembles the musical
stimuli that human subjects experienced, with respect to semantic properties
like genre, instrumentation, and mood. We investigate the relationship between
different components of MusicLM and brain activity through a voxel-wise
encoding modeling analysis. Furthermore, we discuss which brain regions
represent information derived from purely textual descriptions of music
stimuli. We provide supplementary material including examples of the
reconstructed music at https://google-research.github.io/seanet/brain2music},
  added-at      = {2023-08-28T10:19:47.000+0200},
  archiveprefix = {arXiv},
  author        = {Denk, Timo I. and Takagi, Yu and Matsuyama, Takuya and Agostinelli, Andrea and Nakai, Tomoya and Frank, Christian and Nishimoto, Shinji},
  biburl        = {https://www.bibsonomy.org/bibtex/2c680e40200c3a6f41b9cab289d4f4b02/lsll},
  description   = {Brain2Music: Reconstructing Music from Human Brain Activity},
  eprint        = {2307.11078},
  interhash     = {cdad3597b6cabd0fa14a96c3fe944e13},
  intrahash     = {c680e40200c3a6f41b9cab289d4f4b02},
  keywords      = {ai brain music},
  note          = {Preprint; 21 pages; supplementary material: https://google-research.github.io/seanet/brain2music},
  title         = {{Brain2Music}: Reconstructing Music from Human Brain Activity},
  url           = {http://arxiv.org/abs/2307.11078},
  year          = {2023},
}