@inproceedings{9751c178bd044ec39db2351a163bf72b,
title = "A Transformer-Based Model for the Prediction of Human Gaze Behavior on Videos",
abstract = "Eye-tracking applications that utilize the human gaze in video understanding tasks have become increasingly important. To effectively automate the process of video analysis based on eye-tracking data, it is important to accurately replicate human gaze behavior. However, this task presents significant challenges due to the inherent complexity and ambiguity of human gaze patterns. In this work, we introduce a novel method for simulating human gaze behavior. Our approach uses a transformer-based reinforcement learning algorithm to train an agent that acts as a human observer, with the primary role of watching videos and simulating human gaze behavior. We employed an eye-tracking dataset gathered from videos generated by the VirtualHome simulator, with a primary focus on activity recognition. Our experimental results demonstrate the effectiveness of our gaze prediction method by highlighting its capability to replicate human gaze behavior and its applicability for downstream tasks where real human-gaze is used as input.",
keywords = "Action recognition, Eye-tracking, Human attention, Human gaze prediction",
author = "S{\"u}leyman {\"O}zdel and Yao Rong and Albaba, {Berat Mert} and Kuo, {Yen Ling} and Xi Wang and Enkelejda Kasneci",
note = "Publisher Copyright: {\textcopyright} 2024 ACM.; 16th Annual ACM Symposium on Eye Tracking Research and Applications, ETRA 2024 ; Conference date: 04-06-2024 Through 07-06-2024",
year = "2024",
month = jun,
day = "4",
doi = "10.1145/3649902.3653439",
language = "English",
series = "Eye Tracking Research and Applications Symposium (ETRA)",
publisher = "Association for Computing Machinery",
editor = "Spencer, {Stephen N.}",
booktitle = "Proceedings - ETRA 2024, ACM Symposium on Eye Tracking Research and Applications",
}