@inproceedings{Patil2013a,
abstract = {How do humans attend to and pick out relevant auditory objects amongst all other sounds in the environment? Based on neurophysiological findings, we propose two task-oriented attentional mechanisms that act as Bayesian priors at two separate levels of processing: a sensory mapping stage and an object representation stage. The sensory stage is modeled as a high-dimensional mapping that captures the spectrotemporal nuances and cues of auditory objects. The object representation stage then captures the statistical distribution of the different classes of acoustic scenes. This scheme yields a relative improvement in performance of 81{\%} over a baseline system.},
author = {Patil, Kailash and Elhilali, Mounya},
booktitle = {IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
doi = {10.1109/ICASSP.2013.6637764},
isbn = {978-1-4799-0356-6},
issn = {1520-6149},
keywords = {Acoustic scene analysis,Auditory attention,Object-based attention,Sensory processing},
pages = {828--832},
title = {{Task-driven attentional mechanisms for auditory scene recognition}},
url = {http://ieeexplore.ieee.org/document/6637764/},
year = {2013}
}