@article{10.3389/fnhum.2014.00787,
  author   = {Joyal, Christian C. and Jacob, Laurence and Cigna, Marie-Hélène and Guay, Jean-Pierre and Renaud, Patrice},
  title    = {Virtual Faces Expressing Emotions: An Initial Concomitant and Construct Validity Study},
  journal  = {Frontiers in Human Neuroscience},
  volume   = {8},
  year     = {2014},
  issn     = {1662-5161},
  doi      = {10.3389/fnhum.2014.00787},
  url      = {https://www.frontiersin.org/articles/10.3389/fnhum.2014.00787},
  abstract = {Background: Facial expressions of emotions represent classic stimuli for the study of social cognition. Developing virtual dynamic facial expressions of emotions, however, would open-up possibilities, both for fundamental and clinical research. For instance, virtual faces allow real-time Human–Computer retroactions between physiological measures and the virtual agent. Objectives: The goal of this study was to initially assess concomitants and construct validity of a newly developed set of virtual faces expressing six fundamental emotions (happiness, surprise, anger, sadness, fear, and disgust). Recognition rates, facial electromyography (zygomatic major and corrugator supercilii muscles), and regional gaze fixation latencies (eyes and mouth regions) were compared in 41 adult volunteers (20 ♂, 21 ♀) during the presentation of video clips depicting real vs. virtual adults expressing emotions. Results: Emotions expressed by each set of stimuli were similarly recognized, both by men and women. Accordingly, both sets of stimuli elicited similar activation of facial muscles and similar ocular fixation times in eye regions from man and woman participants. Conclusion: Further validation studies can be performed with these virtual faces among clinical populations known to present social cognition difficulties. Brain–Computer Interface studies with feedback–feedforward interactions based on facial emotion expressions can also be conducted with these stimuli.},
}