<?xml version="1.0" encoding="UTF-8"?>
<TEI xml:space="preserve" xmlns="http://www.tei-c.org/ns/1.0" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://www.tei-c.org/ns/1.0 https://raw.githubusercontent.com/kermitt2/grobid/master/grobid-home/schemas/xsd/Grobid.xsd"
 xmlns:xlink="http://www.w3.org/1999/xlink">
	<teiHeader xml:lang="en">
		<fileDesc>
			<titleStmt>
				<title level="a" type="main">SnAIR Drum: A Gesture Interface for Rhythm Practice</title>
			</titleStmt>
			<publicationStmt>
				<publisher/>
				<availability status="unknown"><licence/></availability>
			</publicationStmt>
			<sourceDesc>
				<biblStruct>
					<analytic>
						<author>
							<persName><forename type="first">Federico</forename><surname>Cau</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Dept</forename><surname>Mathematics</surname></persName>
							<affiliation key="aff0">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
							<affiliation key="aff2">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Alessandro</forename><surname>Carcangiu</surname></persName>
							<email>alessandro.carcangiu@diee.unica.it</email>
							<affiliation key="aff1">
								<orgName type="department">Dept. Electric and Electronic Engineering</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Marengo 2</addrLine>
									<postCode>09123</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Fabio</forename><surname>Sorrentino</surname></persName>
							<email>fabio.sorrentino@unica.it</email>
							<affiliation key="aff2">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Lucio</forename><forename type="middle">Davide</forename><surname>Spano</surname></persName>
							<email>davide.spano@unica.it</email>
							<affiliation key="aff3">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><forename type="first">Spano</forename><surname>Dept</surname></persName>
							<affiliation key="aff3">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<author>
							<persName><surname>Mathematics</surname></persName>
							<affiliation key="aff3">
								<orgName type="department">Computer Science</orgName>
								<orgName type="institution">University of Cagliari</orgName>
								<address>
									<addrLine>Via Ospedale 72</addrLine>
									<postCode>09124</postCode>
									<settlement>Cagliari</settlement>
									<country key="IT">Italy</country>
								</address>
							</affiliation>
						</author>
						<title level="a" type="main">SnAIR Drum: A Gesture Interface for Rhythm Practice</title>
					</analytic>
					<monogr>
						<imprint>
							<date/>
						</imprint>
					</monogr>
					<idno type="MD5">FFD99408E7733AC471372446461B3E91</idno>
				</biblStruct>
			</sourceDesc>
		</fileDesc>
		<encodingDesc>
			<appInfo>
				<application version="0.7.2" ident="GROBID" when="2023-03-24T03:19+0000">
					<desc>GROBID - A machine learning software for extracting information from scholarly documents</desc>
					<ref target="https://github.com/kermitt2/grobid"/>
				</application>
			</appInfo>
		</encodingDesc>
		<profileDesc>
			<textClass>
				<keywords>
					<term>Gesture Interface</term>
					<term>Music</term>
					<term>Rhythm practice</term>
					<term>Drums</term>
					<term>Leap Motion</term>
					<term>3D User Interface</term>
					<term>WebGL</term>
					<term>H.5.m [Information interfaces and presentation (e.g., HCI)]: Miscellaneous</term>
				</keywords>
			</textClass>
			<abstract>
<div xmlns="http://www.tei-c.org/ns/1.0"><p>The availability of standard devices for gestural interaction supports the distribution on the web of interactive teaching material for learning music. In this paper, we focus on rhythm practice, introducing an exercise platform that exploits gestural interaction for tracking the user's movements and a web-based environment for loading exercises and checking his/her performance. We discuss the design and the technical implementation of the solution.</p></div>
			</abstract>
		</profileDesc>
	</teiHeader>
	<text xml:lang="en">
		<body>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Introduction</head><p>The rhythm or groove in music is an intuitive concept that is really hard to put into practice without a good level of exercise. It consists of the skills needed by a player for using a rhythm sequence that is repeated inside the song temporal flow, maintaining the coherence in the sound progression. All musicians must learn or develop these skills since it is required for mastering an instrument.</p><p>Usually, these skills are developed through self-study and occasional lessons with a music teacher. Therefore, in most of the time spent in practising the student is alone and this leads to situations where mistakes are not noticed by students.</p><p>The evolution of interaction modalities that are currently supported by the web opens this environment to more advanced applications for learning music theory. In this work, we introduce SnAIR Drum, a gestural interface combined with score visualisation for practising the drums. The application interface supports the user in learning how to follow the specified rhythm, how to read the score relying on the body coordination. </p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Related work</head><p>The idea of applying gestural interaction for recognising movements related to the percussive instruments has been widely investigated in the literature. The recognition technology has a huge impact on the effectiveness of these interfaces since the precision and the latency has a key role in their usability. Tindale et al. <ref type="bibr" target="#b6">[7]</ref> discuss different tracking techniques and provide an evaluation of their pros and cons. Bott et al. <ref type="bibr" target="#b0">[1]</ref> used a general purpose gesture input device such as the Nintendo WiiMote for playing a drum game, studying the spatial positioning of the interactive elements for remotes. A similar attempt with a different gestural device (Microsoft Kinect) is discussed in <ref type="bibr" target="#b1">[2]</ref>. The designed interface allows the collaboration of more than one musician.</p><p>Commercial applications for playing the drums through gestural interaction exist in the market. In particular, we can mention here AirBeats <ref type="bibr" target="#b2">[3]</ref>, a Leap Motion-based application that allows users to create rhythms with different styles: Hip-Hop, Dubstep, Trance etc. AirBeats allows using fingers and sticks for playing virtual pads with mid-air movements.</p><p>In this work we set a different objective for the interface: we do not focus on the expressiveness of the musician, but on the exploitation of the gestural input for teaching purposes, including score reading.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>User Interface</head><p>SnAIR Drum is a web application that relies on gestural input for completing percussion exercises. It combines both a simulation of the spatial positioning of the different parts, together with the visualisation of the score and a support for reading it during the exercise.</p><p>The SnAIR drum interface is shown in Figure <ref type="figure" target="#fig_1">2</ref>. In the left part, it visualises the different drum pieces, while the right part shows the score for the exercise. The user plays the virtual drum with his/her fingertips: the movements are shown in Figure <ref type="figure" target="#fig_0">1</ref>. In order to support the coordination between the hand movements and the effects on the virtual pads, the interface shows a stylised representation of the user's hands (see Figure <ref type="figure" target="#fig_1">2</ref>). The drum parts positioning tries to balance two different needs: the first is the correspondence between the positioning of a piece in a real drum set and in the virtual environment. The second is the gestural tracking device limitation, that provides information only on the hand movements. Therefore, we cannot rely on legs for controlling e.g., the kick and the hi-hat.</p><p>We positioned the drum pieces in three concentric circles (see Figure <ref type="figure" target="#fig_1">2</ref>, left part): in the inner one, we positioned those controlled with feet and the snare, in the middle one the toms, while cymbals are in the outer circle. The interaction with the pieces is very simple: each one is contained in a box. When one of the fingertips touches one of these boxes, the application emits the corresponding piece sound and changes the colour of the corresponding bounding box.  The exercise support works as follows. The user loads through a menu the audio file containing the exercise musical base and the score of the drum part in the song. 
When the user pushes the play button (Figure <ref type="figure" target="#fig_1">2</ref> top part), the application plays the exercise base and the score representation highlights the current note that should be played by the user. According to the user's input, the tool shows a green feedback if the user played the right piece at the right time, while it shows a red icon otherwise. In Figure <ref type="figure" target="#fig_1">2</ref> we show an example of error feedback.</p><p>The adherence of the user to the score is computed automatically by the application defining time buffers around the note duration. The scheme is shown in Figure <ref type="figure" target="#fig_2">3</ref>: according to the note duration and to the song tempo, the application creates a buffer around the note duration, in order to be tolerant to small imperfections in the user's performance. If the user beats the correct piece in the green interval, the application considers the execution correct, and wrong otherwise. It is worth pointing out that there may be differences between the time when the user performs the movement and the instant sensed by the application due to input and elaboration latency, as we discuss in the following section.</p><p>A demo of the application is available at the following URL: https://youtu.be/P9Gm5CEAvI4.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Implementation</head><p>The implementation consists of four main components, which control the main elements of the interface.</p><p>The first is the gestural input component, based on the Leap Motion SDK <ref type="bibr" target="#b4">[5]</ref>. It supports the hand tracking, providing a hand-skeleton abstraction. The SDK input stream updates the position and orientation of the on-screen stylised representation of the user's hands.</p><p>The 3D elements in the user interface rely on Three.JS <ref type="bibr" target="#b5">[6]</ref>, a high-level Javascript library for managing WebGL <ref type="bibr" target="#b3">[4]</ref> contents inside HTML pages. This component draws the hand representation, the cubes related to the drum kit parts, detecting the collision with the fingertips.</p><p>The music score and playing component controls the on screen score visualisation, the execution feedback, the base and the drum playing. The visualization exploits the Abcweb.js library <ref type="bibr" target="#b7">[8]</ref>. It is able to read the XML definition of the score and to synchronise it with a musical base, moving a cursor on the different notes according to song tempo.</p><p>From the same score description, the application derives a queue of events corresponding to the correct execution of the song, which is the temporal distribution of the green areas in Figure <ref type="figure" target="#fig_2">3</ref>. This event queue, together with the ones raised by the input device, is used for checking the user's performance.</p></div>
<div xmlns="http://www.tei-c.org/ns/1.0"><head>Conclusion and Future Work</head><p>In this paper, we introduced SnAIR Drum, a web-based application for supporting rhythm exercises through gestural interaction. It is built on top of standard web technologies and it is easy to distribute as teaching material, for complementing existing text, video and audio files.</p><p>The application is designed for supporting a proper music training, including means for enhancing the physical coordination and the user's ability to read and understand the score. In addition, it supports an automated performance checking that provides feedback to the learner.</p><p>In future work, we aim at refining the platform, adding the content authoring part. Complementing this part of the application would enable the creation of an exercise sharing platform for promoting the instrument learning. Finally, we are planning to evaluate the exercise platform with different user tests with both student and music teachers.</p></div><figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_0"><head>Figure 1 :</head><label>1</label><figDesc>Figure 1: Interaction with SnAIR Drum. The user taps with his/her fingers virtual taps in mid-air.</figDesc><graphic coords="2,35.46,207.80,129.54,72.87" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_1"><head>Figure 2 :</head><label>2</label><figDesc>Figure 2: SnAIR Drum interface.</figDesc><graphic coords="3,237.42,112.46,407.16,202.69" type="bitmap" /></figure>
<figure xmlns="http://www.tei-c.org/ns/1.0" xml:id="fig_2"><head>Figure 3 :</head><label>3</label><figDesc>Figure 3: User's performance evaluation. According to the note duration, the application checks whether the beat is contained into the time buffer (green) or not (red).</figDesc><graphic coords="3,35.46,361.20,129.60,61.78" type="bitmap" /></figure>
		</body>
		<back>
			<div type="references">

				<listBibl>

<biblStruct xml:id="b0">
	<analytic>
		<title level="a" type="main">Exploring 3D Gestural Interfaces for Music Creation in Video Games</title>
		<author>
			<persName><forename type="first">Jared</forename><forename type="middle">N</forename><surname>Bott</surname></persName>
		</author>
		<author>
			<persName><forename type="first">James</forename><forename type="middle">G</forename><surname>Crowley</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Joseph</forename><forename type="middle">J</forename><surname>Laviola</surname><genName>Jr</genName></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of FDG &apos;09</title>
				<meeting>FDG &apos;09<address><addrLine>New York, NY, USA</addrLine></address></meeting>
		<imprint>
			<publisher>ACM</publisher>
			<date type="published" when="2009">2009</date>
			<biblScope unit="page" from="18" to="25" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b1">
	<analytic>
		<title level="a" type="main">Nuvolet: 3D Gesture-driven Collaborative Audio Mosaicing</title>
		<author>
			<persName><forename type="first">Josep</forename><forename type="middle">M</forename><surname>Comajuncosas</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Alex</forename><surname>Barrachina</surname></persName>
		</author>
		<author>
			<persName><forename type="first">John</forename><surname>O&apos;Connell</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Enric</forename><surname>Guaus</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">NIME</title>
				<imprint>
			<date type="published" when="2011">2011</date>
			<biblScope unit="page" from="252" to="255" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b2">
	<monogr>
		<author>
			<persName><surname>Handwavy</surname></persName>
		</author>
		<ptr target="https://gallery.leapmotion.com/airbeats/" />
		<title level="m">AirBeats</title>
				<imprint>
			<date type="published" when="2017-07-16">2017-07-16</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b3">
	<monogr>
		<author>
			<persName><forename type="first">Khronos</forename><surname>Group</surname></persName>
		</author>
		<ptr target="https://www.khronos.org/registry/webgl/specs/latest/2.0/" />
		<title level="m">WebGL 2.0 Specification</title>
				<imprint>
			<date type="published" when="2017-07-16">2017-07-16</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b4">
	<monogr>
		<author>
			<persName><forename type="first">Leap</forename><surname>Motion</surname></persName>
		</author>
		<ptr target="https://developer.leapmotion.com/" />
		<title level="m">Leap Motion Developer</title>
				<imprint>
			<date type="published" when="2017-07-16">2017-07-16</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b5">
	<monogr>
		<author>
			<persName><forename type="first">J</forename><forename type="middle">S</forename><surname>Three</surname></persName>
		</author>
		<ptr target="https://threejs.org/" />
		<title level="m">Three JS documentation</title>
				<imprint>
			<date type="published" when="2017-07-16">2017-07-16</date>
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b6">
	<analytic>
		<title level="a" type="main">A Comparison of Sensor Strategies for Capturing Percussive Gestures</title>
		<author>
			<persName><forename type="first">Adam</forename><forename type="middle">R</forename><surname>Tindale</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Ajay</forename><surname>Kapur</surname></persName>
		</author>
		<author>
			<persName><forename type="first">George</forename><surname>Tzanetakis</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Peter</forename><surname>Driessen</surname></persName>
		</author>
		<author>
			<persName><forename type="first">Andrew</forename><surname>Schloss</surname></persName>
		</author>
	</analytic>
	<monogr>
		<title level="m">Proceedings of NIME &apos;05</title>
				<meeting>NIME &apos;05<address><addrLine>Singapore, Singapore</addrLine></address></meeting>
		<imprint>
			<publisher>National University of Singapore</publisher>
			<date type="published" when="2005">2005</date>
			<biblScope unit="page" from="200" to="203" />
		</imprint>
	</monogr>
</biblStruct>

<biblStruct xml:id="b7">
	<monogr>
		<author>
			<persName><forename type="first">Wim</forename><surname>Vree</surname></persName>
		</author>
		<ptr target="https://wim.vree.org/js/index.html" />
		<title level="m">Abcweb.js library</title>
				<imprint>
			<date type="published" when="2017">2017</date>
		</imprint>
	</monogr>
</biblStruct>

				</listBibl>
			</div>
		</back>
	</text>
</TEI>
