@InProceedings{CristinaPalmero2023,
  author    = "Cristina Palmero and Oleg V. Komogortsev and Sergio Escalera and Sachin S. Talathi",
  title     = "Multi-Rate Sensor Fusion for Unconstrained Near-Eye Gaze Estimation",
  booktitle = "Proceedings of the 2023 Symposium on Eye Tracking Research and Applications",
  year      = "2023",
  pages     = "1--8",
  abstract  = "The power requirements of video-oculography systems can be prohibitive for high-speed operation on portable devices. Recently, low-power alternatives such as photosensors have been evaluated, providing gaze estimates at high frequency with a trade-off in accuracy and robustness. Potentially, an approach combining slow/high-fidelity and fast/low-fidelity sensors should be able to exploit their complementarity to track fast eye motion accurately and robustly. To foster research on this topic, we introduce OpenSFEDS, a near-eye gaze estimation dataset containing approximately 2M synthetic camera-photosensor image pairs sampled at 500 Hz under varied appearance and camera position. We also formulate the task of sensor fusion for gaze estimation, proposing a deep learning framework consisting of appearance-based encoding and temporal eye-state dynamics. We evaluate several single- and multi-rate fusion baselines on OpenSFEDS, achieving an 8.7\% error decrease when tracking fast eye movements with a multi-rate approach vs. a gaze forecasting approach operating with a low-speed sensor alone.",
  optnote   = "HUPBA",
  doi       = "10.1145/3588015.3588407"
}