abstract = {The identity of musical instruments is reflected in the acoustic attributes of musical notes played with them. Recently, it has been argued that these characteristics of musical identity (or timbre) can be best captured through an analysis that encompasses both time and frequency domains; with a focus on the modulations or changes in the signal in the spectrotemporal space. This representation mimics the spectrotemporal receptive field (STRF) analysis believed to underlie processing in the central mammalian auditory system, particularly at the level of primary auditory cortex. How well does this STRF representation capture timbral identity of musical instruments in continuous solo recordings remains unclear. The current work investigates the applicability of the STRF feature space for instrument recognition in solo musical phrases and explores best approaches to leveraging knowledge from isolated musical notes for instrument recognition in solo recordings. The study presents an approach for parsing solo performances into their individual note constituents and adapting back-end classifiers using support vector machines to achieve a generalization of instrument recognition to off-the-shelf, commercially available solo music.},
author = {Patil, Kailash and Elhilali, Mounya},
doi = {10.1186/s13636-015-0070-9},
issn = {1687-4722},
journal = {EURASIP Journal on Audio, Speech, and Music Processing},
number = {1},
pages = {27},
title = {Biomimetic Spectro-Temporal Features for Music Instrument Recognition in Isolated Notes and Solo Phrases},
url = {https://asmp-eurasipjournals.springeropen.com/articles/10.1186/s13636-015-0070-9},
volume = {2015},
year = {2015}