@inproceedings{30d4563e128b4990a6d24dddcb326484,
title = "Vari-Sound: A varifocal lens for sound",
abstract = "Centuries of development in optics have given us passive devices (i.e. lenses, mirrors and filters) to enrich audience immersivity with light effects, but there is nothing similar for sound. Beam-forming in concert halls and outdoor gigs still requires a large number of speakers, while headphones remain the state of the art for personalized audio immersivity in VR. In this work, we show how 3D-printed acoustic metasurfaces, assembled into the equivalent of optical systems, may offer a different solution. We demonstrate how to build them and how to use simple design tools, like the thin-lens equation, for sound as well. We present some key acoustic devices: a “collimator”, to transform a standard computer speaker into an acoustic “spotlight”; and a “magnifying glass”, to create sound sources that appear to come from locations distinct from the speaker itself. Finally, we demonstrate an acoustic varifocal lens, discussing applications equivalent to auto-focus cameras and VR headsets, as well as the limitations of the technology.",
keywords = "3D printing, Fabrication, Metamaterials, Microstructures, Spatial audio",
author = "Memoli, Gianluca and Chisari, Letizia and Eccles, {Jonathan P.} and Caleap, Mihai and Drinkwater, {Bruce W.} and Subramanian, Sriram",
year = "2019",
month = may,
day = "2",
doi = "10.1145/3290605.3300713",
language = "English",
series = "Conference on Human Factors in Computing Systems - Proceedings",
publisher = "Association for Computing Machinery (ACM)",
booktitle = "CHI 2019 - Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems",
address = "United States",
note = "2019 CHI Conference on Human Factors in Computing Systems, CHI 2019; Conference date: 04-05-2019 through 09-05-2019",
}