@inproceedings{f0749a1f1e9746a2892473c8258b9a3b,
  title     = {An Eye-Tracking Dataset for Visual Attention Modelling in a Virtual Museum Context},
  abstract  = {Predicting the user's visual attention enables a virtual reality (VR) environment to provide a context-aware and interactive user experience. Researchers have attempted to understand visual attention using eye-tracking data in a 2D plane. In this poster, we propose the first 3D eye-tracking dataset for visual attention modelling in the context of a virtual museum. It comprises about 7 million records and may facilitate visual attention modelling in a 3D VR space.},
  keywords  = {Eye-tracking datasets, Gaze detection, Neural networks, Visual attention},
  author    = {Zhou, Yunzhan and Feng, Tian and Shuai, Shihui and Li, Xiangdong and Sun, Lingyun and Duh, Henry B. L.},
  note      = {Publisher Copyright: {\textcopyright} 2019 Association for Computing Machinery.; 17th ACM SIGGRAPH International Conference on Virtual-Reality Continuum and its Applications in Industry, VRCAI 2019 ; Conference date: 14-11-2019 Through 16-11-2019},
  year      = {2019},
  month     = nov,
  day       = {14},
  doi       = {10.1145/3359997.3365738},
  language  = {English},
  series    = {Proceedings - {VRCAI} 2019: 17th {ACM} {SIGGRAPH} International Conference on Virtual-Reality Continuum and its Applications in Industry},
  publisher = {Association for Computing Machinery, Inc},
  editor    = {Spencer, {Stephen N.}},
  booktitle = {Proceedings - {VRCAI} 2019},
}