Demonstrating Interactive Machine Learning Tools for Rapid Prototyping of Gestural Instruments in the Browser
These demonstrations allow visitors to prototype gestural, interactive musical instruments in the browser. Different browser-based synthesisers can be controlled by either a Leap Motion sensor or a Myo armband, and visitors use an interactive machine learning toolkit to explore different interaction possibilities quickly and iteratively. The demonstrations show how interactive, browser-based machine learning tools can be used to rapidly prototype gestural controllers for audio, and they showcase RapidLib, a browser-based machine learning library developed through the RAPID-MIX project.
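The core of each demo is a record-train-run loop: the visitor records a few sensor poses paired with synthesiser settings, and a model then maps new poses to parameter values in real time. The sketch below illustrates that loop with a hand-rolled nearest-neighbour regression standing in for RapidLib (whose actual API is not reproduced here); the hand-position features and the frequency/gain mapping are illustrative assumptions, not the paper's exact setup.

// Minimal sketch of the record-train-run loop behind the demos (TypeScript).
// A hand-rolled nearest-neighbour regression stands in for RapidLib; the
// sensor features (normalised hand x/y) and the frequency/gain mapping are
// illustrative assumptions, not the paper's actual setup.

type Example = { input: number[]; output: number[] };

class NearestNeighbourRegression {
  private examples: Example[] = [];

  // "Record": pair a sensor pose with the synth settings the user chose.
  record(input: number[], output: number[]): void {
    this.examples.push({ input, output });
  }

  // "Run": map a new pose to the outputs of its closest recorded pose.
  run(input: number[]): number[] {
    if (this.examples.length === 0) throw new Error("no training examples");
    let best = this.examples[0];
    let bestDist = Infinity;
    for (const ex of this.examples) {
      const d = ex.input.reduce((sum, v, i) => sum + (v - input[i]) ** 2, 0);
      if (d < bestDist) { bestDist = d; best = ex; }
    }
    return best.output;
  }
}

// Two demonstration poses mapped to oscillator frequency (Hz) and gain.
const model = new NearestNeighbourRegression();
model.record([0.1, 0.9], [220, 0.2]); // hand low-left: quiet low tone
model.record([0.9, 0.1], [880, 0.8]); // hand high-right: loud high tone

const ctx = new AudioContext();
const osc = ctx.createOscillator();
const gain = ctx.createGain();
osc.connect(gain).connect(ctx.destination);
osc.start();

// In the demos this would be driven by Leap Motion or Myo frame callbacks.
function onSensorFrame(x: number, y: number): void {
  const [freq, amp] = model.run([x, y]);
  osc.frequency.setTargetAtTime(freq, ctx.currentTime, 0.05);
  gain.gain.setTargetAtTime(amp, ctx.currentTime, 0.05);
}

onSensorFrame(0.5, 0.5); // a pose between the two examples snaps to the nearer one

Nearest-neighbour is used here only because it retrains instantly, which mirrors the immediate, iterative feel the demos aim for; RapidLib itself supplies trained regression and classification models for the same loop.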
@inproceedings{2017_EA_10,
abstract = {These demonstrations allow visitors to prototype gestural, interactive musical instruments in the browser. Different browser-based synthesisers can be controlled by either a Leap Motion sensor or a Myo armband, and visitors use an interactive machine learning toolkit to explore different interaction possibilities quickly and iteratively. The demonstrations show how interactive, browser-based machine learning tools can be used to rapidly prototype gestural controllers for audio, and they showcase RapidLib, a browser-based machine learning library developed through the RAPID-MIX project.},
address = {London, United Kingdom},
author = {Parkinson, Adam and Zbyszynski, Michael and Bernardo, Francisco},
booktitle = {Proceedings of the International Web Audio Conference},
editor = {Thalmann, Florian and Ewert, Sebastian},
month = {August},
publisher = {Queen Mary University of London},
series = {WAC '17},
title = {Demonstrating Interactive Machine Learning Tools for Rapid Prototyping of Gestural Instruments in the Browser},
year = {2017},
issn = {2663-5844}
}