Methods for 3D Gaze Estimation
We provide code for 3D gaze estimation methods on this page. Most of the methods are implemented in PyTorch.
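As a rough sketch of the setup these implementations share, and not the code of any specific method listed below, a PyTorch gaze estimator typically takes a cropped face (or eye) image and regresses a 2D gaze direction (yaw, pitch). The network depth, input size, and loss below are illustrative assumptions only:

import torch
import torch.nn as nn

class GazeRegressor(nn.Module):
    """Minimal appearance-based gaze estimator: image -> (yaw, pitch)."""
    def __init__(self):
        super().__init__()
        # Small convolutional backbone (illustrative only; the methods below use
        # deeper networks such as ResNet or VGG variants).
        self.features = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=3, stride=2, padding=1), nn.ReLU(inplace=True),
            nn.Conv2d(32, 64, kernel_size=3, stride=2, padding=1), nn.ReLU(inplace=True),
            nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1), nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d(1),
        )
        # Two-unit head regressing the gaze angles.
        self.head = nn.Linear(128, 2)

    def forward(self, face):
        x = self.features(face).flatten(1)
        return self.head(x)

model = GazeRegressor()
faces = torch.randn(8, 3, 224, 224)   # batch of normalized face crops (size is an assumption)
gaze = model(faces)                   # shape: (8, 2)
loss = nn.functional.l1_loss(gaze, torch.zeros_like(gaze))  # placeholder labels; L1 loss is a common choice
print(gaze.shape, loss.item())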
If you use the code on this page, please cite our survey:
@article{Cheng2021Survey,
title={Appearance-based Gaze Estimation With Deep Learning: A Review and Benchmark},
author={Yihua Cheng and Haofei Wang and Yiwei Bao and Feng Lu},
journal={arXiv preprint arXiv:2104.12668},
year={2021}
}
A Coarse-to-fine Adaptive Network for Appearance-based Gaze Estimation
Links:
- Code (author version): coming soon.
Paper Citation
Please also cite the original paper:
@InProceedings{Cheng_2020_AAAI,
author = {Yihua Cheng and Shiyao Huang and Fei Wang and Chen Qian and Feng Lu},
title = {A Coarse-to-fine Adaptive Network for Appearance-based Gaze Estimation},
booktitle = {Proceedings of the AAAI Conference on Artificial Intelligence (AAAI)},
year = {2020}
}
Appearance-Based Gaze Estimation Using Dilated-Convolutions
Links:
Paper Citation
Please also cite the original paper:
@InProceedings{Chen_2019_ACCV,
author={Chen, Zhaokang and Shi, Bertram E.},
title={Appearance-Based Gaze Estimation Using Dilated-Convolutions},
booktitle={ACCV},
year={2019},
pages={309--324},
isbn={978-3-030-20876-9}
}
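For readers unfamiliar with the building block named in the title above: a dilated (atrous) convolution in PyTorch is just nn.Conv2d with a dilation argument, which enlarges the receptive field without pooling or extra parameters. The snippet below is a minimal illustration under assumed tensor sizes, not code from the paper:

import torch
import torch.nn as nn

# Standard 3x3 convolution: 3x3 receptive field.
conv = nn.Conv2d(64, 64, kernel_size=3, padding=1)
# Dilated 3x3 convolution (dilation=2): 5x5 receptive field with the same number
# of parameters and the same output resolution, so context grows without pooling.
dilated = nn.Conv2d(64, 64, kernel_size=3, padding=2, dilation=2)

x = torch.randn(1, 64, 36, 60)          # e.g., a feature map from an eye-image crop (size is an assumption)
print(conv(x).shape, dilated(x).shape)  # both: torch.Size([1, 64, 36, 60])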
Gaze360: Physically Unconstrained Gaze Estimation in the Wild
Links:
Paper Citation
Please also cite the original paper:
@InProceedings{Kellnhofer_2019_ICCV,
author = {Kellnhofer, Petr and Recasens, Adria and Stent, Simon and Matusik, Wojciech and Torralba, Antonio},
title = {Gaze360: Physically Unconstrained Gaze Estimation in the Wild},
booktitle = {The IEEE International Conference on Computer Vision (ICCV)},
month = {October},
year = {2019}
}
RT-GENE: Real-Time Eye Gaze Estimation in Natural Environments
Links:
Paper Citation
Please also cite the original paper:
@InProceedings{Fischer_2018_ECCV,
author = {Fischer, Tobias and Chang, Hyung Jin and Demiris, Yiannis},
title = {RT-GENE: Real-Time Eye Gaze Estimation in Natural Environments},
booktitle = {The European Conference on Computer Vision (ECCV)},
month = {September},
year = {2018}
}
MPIIGaze: Real-World Dataset and Deep Appearance-Based Gaze Estimation
Links:
Paper Citation
Please also cite the original paper:
@ARTICLE{Zhang_2017_tpami,
author={Zhang, Xucong and Sugano, Yusuke and Fritz, Mario and Bulling, Andreas},
journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},
title={MPIIGaze: Real-World Dataset and Deep Appearance-Based Gaze Estimation},
year={2019},
volume={41},
number={1},
pages={162--175},
doi={10.1109/TPAMI.2017.2778103},
ISSN={1939-3539},
month={Jan}
}
It’s written all over your face: Full-face appearance-based gaze estimation
Links:
Paper Citation
Please also cite the original paper:
@inproceedings{Zhang_2017_CVPRW,
title={It’s written all over your face: Full-face appearance-based gaze estimation},
author={Zhang, Xucong and Sugano, Yusuke and Fritz, Mario and Bulling, Andreas},
booktitle={The IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)},
pages={2299--2308},
month={July},
year={2017},
organization={IEEE}
}
Appearance-Based Gaze Estimation in the Wild
Links:
Paper Citation
Please also cite the original paper:
@InProceedings{Zhang_2015_CVPR,
author = {Zhang, Xucong and Sugano, Yusuke and Fritz, Mario and Bulling, Andreas},
title = {Appearance-Based Gaze Estimation in the Wild},
booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {June},
year = {2015}
}