Touchscreens enable intuitive mobile interaction. However, touch input is limited to two-dimensional touch locations, which makes it challenging to provide shortcuts and secondary actions similar to those offered by hardware keyboards and mice. This project studies the use of the raw measurements of capacitive touchscreens together with recent deep learning methods to identify the finger that is touching the display. The resulting models differentiate between the main fingers for interaction (i.e., thumb and index finger) and the other fingers with accuracies of up to 92%. The results were presented at the ACM Conference on Intelligent User Interfaces (IUI '19) in Los Angeles, CA, USA, in March 2019.
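To make the approach concrete, the following is a minimal sketch in PyTorch of a convolutional classifier over a single capacitive frame. The 15x27 input resolution, the layer sizes, and the three-class labeling (thumb / index / other) are illustrative assumptions for this sketch, not the architecture evaluated in the paper.

import torch
import torch.nn as nn

class FingerIdCNN(nn.Module):
    """Small CNN that classifies one low-resolution capacitive image.
    Input: 1 x 15 x 27 mutual-capacitance frame (illustrative size)."""
    def __init__(self, num_classes=3):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=3, padding=1), nn.ReLU(),
            nn.Conv2d(16, 32, kernel_size=3, padding=1), nn.ReLU(),
            nn.MaxPool2d(2),  # 15x27 -> 7x13
        )
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(32 * 7 * 13, 128), nn.ReLU(),
            nn.Linear(128, num_classes),
        )
    def forward(self, x):
        return self.classifier(self.features(x))

model = FingerIdCNN()
frame = torch.randn(1, 1, 15, 27)  # one capacitive frame (batch of 1)
logits = model(frame)              # scores for thumb / index / other

The key idea is that a capacitive frame is just a small single-channel image, so standard image-classification building blocks apply directly.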
The deep learning models and the full data set of this project are available on the project's GitHub page, where researchers and practitioners can use them for further investigations of finger identification. The data set contains over 455,000 capacitive images representing touches of all fingers; it can be used to train new identification models or, as machine learning research advances, to improve on our results.
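As a rough sketch of how the released data could be consumed for training, the following assumes the capacitive images have been exported as NumPy arrays with one integer finger label per touch; the file names and array shapes here are hypothetical, and the actual layout is documented in the repository. It reuses the FingerIdCNN sketch from above.

import numpy as np
import torch
from torch.utils.data import TensorDataset, DataLoader

# Hypothetical file names and shapes; see the GitHub page for the
# real dataset layout.
images = np.load("capacitive_images.npy")  # shape: (N, 15, 27)
labels = np.load("finger_labels.npy")      # shape: (N,), finger index per touch

dataset = TensorDataset(
    torch.from_numpy(images).float().unsqueeze(1),  # add channel dimension
    torch.from_numpy(labels).long(),
)
loader = DataLoader(dataset, batch_size=64, shuffle=True)

# One training pass with the sketch model defined above.
model = FingerIdCNN(num_classes=3)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
criterion = torch.nn.CrossEntropyLoss()
for x, y in loader:
    optimizer.zero_grad()
    loss = criterion(model(x), y)
    loss.backward()
    optimizer.step()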
@inproceedings{le2019investigating,
  author = {Le, Huy Viet and Mayer, Sven and Henze, Niels},
  title = {Investigating the Feasibility of Finger Identification on Capacitive Touchscreens Using Deep Learning},
  booktitle = {Proceedings of the 24th International Conference on Intelligent User Interfaces},
  series = {IUI '19},
  year = {2019},
  isbn = {978-1-4503-6272-6},
  location = {Marina del Rey, California},
  pages = {637--649},
  numpages = {13},
  url = {http://doi.acm.org/10.1145/3301275.3302295},
  doi = {10.1145/3301275.3302295},
  acmid = {3302295},
  publisher = {ACM},
  address = {New York, NY, USA},
  keywords = {capacitive image, deep learning, finger identification, finger-aware interaction, smartphone, touchscreen},
}