BibTeX
@inproceedings{Alqahtani:2018:10.20380/GI2018.12,
author = {Alqahtani, Felwah and Reilly, Derek},
title = {It’s the Gesture That (re)Counts: Annotating While Running to Recall Affective Experience},
booktitle = {Proceedings of Graphics Interface 2018},
series = {GI 2018},
year = {2018},
isbn = {978-0-9947868-3-8},
location = {Toronto, Ontario},
pages = {84--92},
numpages = {9},
doi = {10.20380/GI2018.12},
publisher = {Canadian Human-Computer Communications Society / Soci{\'e}t{\'e} canadienne du dialogue humain-machine},
}
Abstract
We present results from a study exploring whether gestural annotations of felt emotion, presented on a map-based visualization, can support recall of affective experience during recreational runs. We compare gestural annotations with audio and video notes and a “mental note” baseline. In our study, 20 runners were asked to record their emotional state at regular intervals while running a familiar route. Each runner used one of the four methods to capture emotion over four separate runs. Five days after the last run, runners used an interactive map-based visualization to review and recall their running experiences. Results indicate that gestural annotation promoted recall of affective experience more effectively than the baseline condition, as measured by confidence in recall and the detail provided. Gestural annotation was also comparable to video and audio annotation in terms of recollection confidence and detail. Audio annotation supported recall primarily through the runner's spoken notes, although background sounds were sometimes used as cues. Video annotation yielded the most detail, much of it directly related to visual cues in the video; however, using video annotations required runners to stop during their runs. Given these results, we propose that background logging of ambient sounds and video may supplement gestural annotation.