@inproceedings{b4bee34d790a4773be354d7c1d940e6f,
title = "Estimation of gazing points in environment using eye tracker and omnidirectional camera",
abstract = "In this work, we propose a method for estimating the user's gazing point in the environment using images taken by an eye tracker and an omnidirectional camera. The proposed method estimates the gazing point in the environment by mapping the gazing point obtained by the eye tracker onto the omnidirectional camera image. However, matching the omnidirectional image and the eye tracker image is difficult because the omnidirectional image is distorted by the equirectangular projection. Therefore, we propose a method for estimating the gazing point in the omnidirectional image by matching the eye tracker image to the omnidirectional image while taking the distortion into account. Specifically, the method alternates between image matching and image transformation based on the matching results.",
keywords = "Cameras, Distortion, Estimation, Eyebrows, Image matching, Lenses, Mathematical model",
author = "Shun Chiba and Tomo Miyazaki and Yoshihiro Sugaya and Shinichiro Omachi",
year = "2015",
month = aug,
day = "20",
doi = "10.1109/ICCE-TW.2015.7217003",
language = "English",
series = "2015 IEEE International Conference on Consumer Electronics - Taiwan, ICCE-TW 2015",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "47--48",
booktitle = "2015 IEEE International Conference on Consumer Electronics - Taiwan, ICCE-TW 2015",
note = "2nd IEEE International Conference on Consumer Electronics - Taiwan, ICCE-TW 2015 ; Conference date: 06-06-2015 Through 08-06-2015",
}