Voice assistants like Siri enable us to control IoT devices conveniently with
voice commands; however, they also provide new attack opportunities for
adversaries. Previous papers attack voice assistants with obfuscated voice
commands by leveraging the gap between speech recognition systems and human
voice perception. The limitation is that these obfuscated commands are audible
and thus conspicuous to device owners. In this paper, we propose a novel
mechanism to directly attack the microphone used for sensing voice data with
inaudible voice commands. We show that the adversary can exploit the
microphone's non-linearity and play well-designed inaudible ultrasounds to
cause the microphone to record normal voice commands, and thus control the
victim device inconspicuously. We demonstrate via end-to-end real-world
experiments that our inaudible voice commands can attack an Android phone and
an Amazon Echo device with high success rates at a range of 2-3 meters.
%0 Generic
%1 song2017inaudible
%A Song, Liwei
%A Mittal, Prateek
%D 2017
%K ba injection intermodulation microphone non-linearity recognition speech ultrasound
%T Inaudible Voice Commands
%U http://arxiv.org/abs/1708.07238
%X Voice assistants like Siri enable us to control IoT devices conveniently with
voice commands; however, they also provide new attack opportunities for
adversaries. Previous papers attack voice assistants with obfuscated voice
commands by leveraging the gap between speech recognition systems and human
voice perception. The limitation is that these obfuscated commands are audible
and thus conspicuous to device owners. In this paper, we propose a novel
mechanism to directly attack the microphone used for sensing voice data with
inaudible voice commands. We show that the adversary can exploit the
microphone's non-linearity and play well-designed inaudible ultrasounds to
cause the microphone to record normal voice commands, and thus control the
victim device inconspicuously. We demonstrate via end-to-end real-world
experiments that our inaudible voice commands can attack an Android phone and
an Amazon Echo device with high success rates at a range of 2-3 meters.
@misc{song2017inaudible,
abstract = {Voice assistants like Siri enable us to control IoT devices conveniently with
voice commands, however, they also provide new attack opportunities for
adversaries. Previous papers attack voice assistants with obfuscated voice
commands by leveraging the gap between speech recognition system and human
voice perception. The limitation is that these obfuscated commands are audible
and thus conspicuous to device owners. In this paper, we propose a novel
mechanism to directly attack the microphone used for sensing voice data with
inaudible voice commands. We show that the adversary can exploit the
microphone's non-linearity and play well-designed inaudible ultrasounds to
cause the microphone to record normal voice commands, and thus control the
victim device inconspicuously. We demonstrate via end-to-end real-world
experiments that our inaudible voice commands can attack an Android phone and
an Amazon Echo device with high success rates at a range of 2-3 meters.},
added-at = {2018-07-17T15:47:13.000+0200},
author = {Song, Liwei and Mittal, Prateek},
biburl = {https://www.bibsonomy.org/bibtex/2d357ad622e801059d95d05986a9f77b0/chr},
description = {Inaudible Voice Commands},
interhash = {30b709fac8538358a0ef2c0973f6e7f9},
intrahash = {d357ad622e801059d95d05986a9f77b0},
keywords = {ba injection intermodulation microphone non-linearity recognition speech ultrasound},
note = {cite arxiv:1708.07238},
timestamp = {2018-07-17T15:47:13.000+0200},
title = {Inaudible Voice Commands},
url = {http://arxiv.org/abs/1708.07238},
year = 2017
}