@inproceedings{7b721337618d4b369d1971738db54b0d,
title = "A novel dataset for real-life evaluation of facial expression recognition methodologies",
abstract = "One limitation seen among most of the previous methods is that they were evaluated under settings that are far from real-life scenarios. The reason is that the existing facial expression recognition (FER) datasets are mostly pose-based and assume a predefined setup. The expressions in these datasets are recorded using a fixed camera deployment with a constant background and static ambient settings. In a real-life scenario, FER systems are expected to deal with changing ambient conditions, dynamic background, varying camera angles, different face size, and other human-related variations. Accordingly, in this work, three FER datasets are collected over a period of six months, keeping in view the limitations of existing datasets. These datasets are collected from YouTube, real world talk shows, and real world interviews. The most widely used FER methodologies are implemented, and evaluated using these datasets to analyze their performance in real-life situations.",
keywords = "YouTube, Facial expression recognition, Feature Selection, Feature extraction, Recognition, METIS-318533, IR-101596, Real-world, EWI-27248",
author = "Siddiqi, {Muhammad Hameed} and Maqbool Ali and Muhammad Idris and {Banos Legran}, Oresti and Sungyoung Lee and Hyunseung Choo",
note = "eemcs-eprint-27248 ; 29th Canadian Conference on Artificial Intelligence, CCAI 2016 ; Conference date: 31-05-2016 Through 03-06-2016",
year = "2016",
month = may,
day = "31",
doi = "10.1007/978-3-319-34111-8_12",
language = "Undefined",
isbn = "978-3-319-34110-1",
series = "Lecture notes in artificial intelligence",
publisher = "Springer",
pages = "89--95",
booktitle = "29th Canadian Conference on Artificial Intelligence, CCAI 2016",
address = "Germany",
}