Automatically generated by Mendeley Desktop 1.13.2
Any changes to this file will be lost if it is regenerated by Mendeley.
BibTeX export options can be customized via Options -> BibTeX in Mendeley Desktop

@inproceedings{Courgeon2008,
  abstract = {Designing affective user interfaces involving expressive characters raises several questions. The system should be able to display facial expressions of complex emotions as dynamic and realtime reactions to user's inputs. From a cognitive point of view, designers need to know how the user will perceive the dynamics of these facial expressions as a function of his/her input. We aim at evaluating if users can perceive different expressive profiles of a virtual character by manually controlling its expressions and observing its reaction to his/her input. This paper describes our platform that enables a virtual character to display blended facial expressions of emotions as realtime continuous reactions to users' gesture input. We explain the techniques underlying the computation of intermediate facial expressions of emotion, and their control in the 3D space PAD (Pleasure, Arousal, Dominance) using gesture input. Preliminary results of a perceptive study show the potential of such an approach for assessing the dynamics of the perception of emotional expressions during gesture interaction with virtual characters endowed with different expressive profiles.},
  address = {Estoril, Portugal},
  annote = {The user reports his/her affective state in real-time with changing the position of a 3D point in PAD space using a joystick. The system captures the position of that point and maps it to a blend of 8 selected emotions.},
  author = {Courgeon, Matthieu and Martin, Jean-Claude and Jacquemin, Christian},
  booktitle = {AAMAS '08 Proceedings of the 7th international joint conference on Autonomous agents and multiagent systems - Volume 3},
  editor = {Padgham, Lin and Parkes, David C. and M{\"u}ller, J{\"o}rg P. and Parsons, Simon},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Courgeon, Martin, Jacquemin - 2008 - User’s Gestural Exploration of Different Virtual Agents ’ Expressive Profiles (Short Paper).pdf:pdf},
  keywords = {Expressive agent,facial expressions,realtime interaction},
  pages = {1237--1240},
  publisher = {International Foundation for Autonomous Agents and Multiagent Systems},
  title = {{User's Gestural Exploration of Different Virtual Agents' Expressive Profiles (Short Paper)}},
  url = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.149.8130},
  year = {2008}
}

@article{miller2002mesa,
  author = {Miller, William R. and Wilbourne, Paula L.},
  title = {{Mesa Grande}: a methodological analysis of clinical trials of treatments for alcohol use disorders},
  journal = {Addiction},
  volume = {97},
  number = {3},
  pages = {265--277},
  year = {2002},
  publisher = {Wiley Online Library}
}

@article{Baldassarri2008, abstract = {This paper presents a powerful animation engine for developing applications with embodied animated agents called Maxine. The engine, based on open source tools, allowsmanagement of scenes and virtual characters, and pays special attention to multimodal and emotional interaction with the user. Virtual actors are endowed with facial expressions, lip-synch, emotional voice, and they can vary their answers depending on their own emotional state and the relationship with the user during conversation. 
Maxine virtual agents have been used in several applications: a virtual presenter was employed in MaxinePPT, a specific application developed to allow non-programmers to create 3D presentations easily using classical PowerPoint presentations; a virtual character was also used as an interactive interface to communicate with and control a domotic environment; finally, an interactive pedagogical agent was used to simplify and improve the teaching and practice of Computer Graphics subjects.},
  author = {Baldassarri, Sandra and Cerezo, Eva and Seron, Francisco J.},
  doi = {10.1016/j.cag.2008.04.006},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Baldassarri, Cerezo, Seron - 2008 - Maxine A platform for embodied animated agents.pdf:pdf},
  issn = {00978493},
  journal = {Computers \& Graphics},
  keywords = {Animated characters,Multimodal interfaces,Natural interaction,Virtual worlds},
  month = aug,
  number = {4},
  pages = {430--437},
  title = {{Maxine: A platform for embodied animated agents}},
  url = {http://linkinghub.elsevier.com/retrieve/pii/S0097849308000472},
  volume = {32},
  year = {2008}
}

@inproceedings{Gratch2007,
  abstract = {Emotional bonds don't arise from a simple exchange of facial displays, but often emerge through the dynamic give and take of face-to-face interactions. This article explores the phenomenon of rapport, a feeling of connectedness that seems to arise from rapid and contingent positive feedback between partners and is often associated with socio-emotional processes. Rapport has been argued to lead to communicative efficiency, better learning outcomes, improved acceptance of medical advice and successful negotiations. We provide experimental evidence that a simple virtual character that provides positive listening feedback can induce stronger rapport-like effects than face-to-face communication between human partners. Specifically, this interaction can be more engaging to storytellers than speaking to a human audience, as measured by the length and content of their stories.},
  address = {Chamonix, France},
  author = {Gratch, Jonathan and Wang, Ning and Okhmatovskaia, Anna},
  booktitle = {Proceedings of the 12th international conference on Human-computer interaction: intelligent multimodal interaction environments, HCI'07},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Wang, Okhmatovskaia - 2007 - Can virtual humans be more engaging than real ones.pdf:pdf},
  publisher = {Springer-Verlag Berlin Heidelberg},
  title = {{Can virtual humans be more engaging than real ones?}},
  url = {http://dl.acm.org/citation.cfm?id=1769622},
  year = {2007}
}

@inproceedings{Gordon1985, abstract = {Despite the almost complete lack of research addressing a theoretical understanding of empathy or ways to increase human empathy, empathy is a central component of effective human communication. Seen as a key social science phenomenon, it is viewed, along with power, as an inextricable component of human dynamics, and, in its relationship with altruism, possibly plays a causal role. A problem with research on empathy has been a lack of conceptual clarity. Three ways to improve empathetic listening are to avoid judgment, give the speaker time to speak without interruption, and focus on the speaker. Many of the helping professions have attempted training programs aimed at increasing the empathetic communication skills of practitioners in these fields. However, being told to listen empathetically is not the same as being taught to listen with empathy; and in critique of the empathy skills programs that are conducted within the helping professions, a significantly raised test score does not mean that empathy has been attained. 
Although empathetic communication is a complex subject matter, skills associated with empathy and active listening have been perceived as being more important than skills associated with critical or deliberative listening.},
  address = {Baguio, Philippines},
  author = {Gordon, Ronald D.},
  booktitle = {International Conference of the World Communication Association},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gordon - 1985 - Empathy The State of the Art and Science.pdf:pdf},
  keywords = {Communication (thought transfer),empathy,interpersonal communication,listening,listening habits,listening skills,speech communication},
  pages = {1--16},
  title = {{Empathy: The State of the Art and Science}},
  year = {1985}
}

@article{Prendinger2006,
  abstract = {This paper presents a novel method for evaluating the impact of animated interface agents with affective and empathic behavior. While previous studies relied on questionnaires in order to assess the user's overall experience with the interface agent, we will analyze users' physiological response (skin conductance and electromyography), which allows us to estimate affect-related user experiences on a moment-by-moment basis without interfering with the primary interaction task. As an interaction scenario, a card game has been implemented where the user plays against a virtual opponent. The findings of our study indicate that within a competitive gaming scenario, (i) the absence of the agent's display of negative emotions is conceived as arousing or stress-inducing, and (ii) the valence of users' emotional response is congruent with the valence of the emotion expressed by the agent. Our results for skin conductance could also be reproduced by assuming a local rather than a global baseline.},
  author = {Prendinger, Helmut and Becker-Asano, Christian},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Prendinger, Becker-Asano - 2006 - A Study in User's Physiological Response to an Empathic Interface Agent.pdf:pdf},
  journal = {International Journal of Humanoid Robotics},
  keywords = {affective behavior,empathy,evaluation,life-like characters,physiological user information},
  number = {3},
  pages = {371--391},
  title = {{A Study in User's Physiological Response to an Empathic Interface Agent}},
  volume = {3},
  year = {2006}
}

@inproceedings{Robison2010,
  abstract = {Affective interventions can both positively and negatively influence learning experiences. In this paper we investigate the role of student personality, including goal orientation and empathetic tendencies, in estimating confidence in the benefits of an affective intervention strategy. The results indicate that student personality profiles can serve as a powerful tool for informing affective feedback models.},
  annote = {We can use the same way of menu base self-report affective state recognition (in addition to automatic) and text-base empathic feedback. Then after each feedback we can evaluate the feedback with another user self-report.},
  author = {Robison, Jennifer and McQuiggan, Scott W. and Lester, James},
  booktitle = {Proceedings of the 10th International Conference on Intelligent Tutoring Systems (ITS 2010)},
  keywords = {affect,affective computing,pedagogical agents},
  pages = {285--295},
  title = {{Developing Empirically Based Student Personality Profiles for Affective Feedback Models}},
  year = {2010}
}

@inproceedings{Nguyen2009, abstract = {Experiencing emotional distress is the number one reason why people who are undergoing behaviour modification (e.g. quitting smoking, dieting) suffer from relapses. Providing emotional support is an effective way to help them overcome the unpleasant effects of negative affect and adhere to their regimen. 
Building computers with such ability has grabbed the attention of the HCI community in recent years. This paper presents the results of a 2 (modality: animated vs. no visual) by 3 (intervention: non-empathy vs. empathy vs. empathy and expressivity) between-subjects study that investigates the impact of two important factors and their interaction in the design of such systems: (1) different ways of expressing empathy, and (2) the modality of delivering such content.}, address = {Claremont, California, USA}, author = {Nguyen, H. and Masthoff, Judith}, booktitle = {Proceedings of the 4th International Conference on Persuasive Technology (Persuasive’09)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nguyen, Masthoff - 2009 - Designing empathic computers the effect of multimodal empathic feedback using animated agent.pdf:pdf}, isbn = {9781605583761}, keywords = {affective computing,design,experimentation,human factors}, publisher = {ACM}, title = {{Designing empathic computers: the effect of multimodal empathic feedback using animated agent}}, year = {2009} } @article{Shamay-Tsoory2011, abstract = {Human empathy relies on the ability to share emotions as well as the ability to understand the other's thoughts, desires, and feelings. Recent evidence points to 2 separate systems for empathy: an emotional system that supports our ability to empathize emotionally and a cognitive system that involves cognitive understanding of the other's perspective. A neural network that includes the inferior frontal gyrus and the inferior parietal lobule is necessary for emotion recognition and emotional contagion. Although the emotional and cognitive systems appear to work independently, every empathic response may still evoke both components to some extent, depending on the social context.}, annote = {Difference between cognitive empathy and emotional empathy is explained in this paper. 
Also they talk about the active brain parts in each empathy type.}, author = {Shamay-Tsoory, Simone G}, doi = {10.1177/1073858410379268}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Shamay-Tsoory - 2011 - The neural bases for empathy.pdf:pdf}, issn = {1089-4098}, journal = {The Neuroscientist : a review journal bringing neurobiology, neurology and psychiatry}, keywords = {Brain,Brain: physiology,Empathy,Empathy: physiology,Humans,Neural Pathways,Neural Pathways: physiology}, month = feb, number = {1}, pages = {18--24}, pmid = {21071616}, title = {{The neural bases for empathy.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21071616}, volume = {17}, year = {2011} } @article{Caridakis2008, abstract = {As input they consider the image sequence of the recorded human behavior. Computer vision and image processing techniques are incorporated in order to detect cues needed for expressivity features extraction. Using multimodalities, the virtual agent mimics the human expressions. The multimodality of the approach lies in the fact that both facial and gestural aspects of the user’s behavior are analyzed and processed. The mimicry consists of perception, interpretation, planning and animation of the expressions shown by the human, resulting not in an exact duplicate rather than an expressive model of the user’s original behavior.}, author = {Caridakis, George and Raouzaiou, Amaryllis and Bevacqua, Elisabetta and Mancini, Maurizio and Karpouzis, Kostas and Malatesta, Lori and Pelachaud, Catherine}, doi = {10.1007/s10579-007-9057-1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Caridakis et al. 
- 2008 - Virtual agent multimodal mimicry of humans.pdf:pdf}, issn = {1574-020X}, journal = {International Language Resources and Evaluation Journal: Special issue on Multimodal Corpora For Modelling Human Multimodal Behavior}, keywords = {facial,gesture,mimicry,multimodal,virtual agent}, month = jan, number = {3-4}, pages = {367--388}, title = {{Virtual agent multimodal mimicry of humans}}, volume = {41}, year = {2008} } @article{Lakin2003, abstract = {The “chameleon effect” refers to the tendency to adopt the postures, gestures, and mannerisms of interaction partners (Chartrand \& Bargh, 1999). This type of mimicry occurs outside of conscious awareness, and without any intent to mimic or imitate. Empirical evidence suggests a bi-directional relationship between nonconscious mimicry on the one hand, and liking, rapport, and affiliation on the other. That is, nonconscious mimicry creates affiliation, and affiliation can be ex- pressed through nonconscious mimicry. We argue that mimicry played an impor- tant role in human evolution. Initially, mimicry may have had survival value by helping humans communicate. We propose that the purpose of mimicry has now evolved to serve a social function. Nonconscious behavioral mimicry increases af- filiation, which serves to foster relationships with others. We review current re- search in light of this proposed framework and suggest future areas of research.}, author = {Lakin, J. L. 
and Jefferis, VE and Cheng, CM}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lakin, Jefferis, Cheng - 2003 - The chameleon effect as social glue Evidence for the evolutionary significance of nonconscious mimicry.pdf:pdf}, journal = {Journal of nonverbal Behavior}, keywords = {affiliation,chameleon effect,human evolution,mimicry}, number = {3}, pages = {145--162}, title = {{The chameleon effect as social glue: Evidence for the evolutionary significance of nonconscious mimicry}}, volume = {27}, year = {2003} } @inproceedings{Boukricha2009, abstract = {a system for simulating emotional facial expressions for a virtual human has been evolved. This system consists of two parts: (1) a control ar- chitecture for simulating emotional facial expressions with respect to Pleasure, Arousal, and Dominance (PAD) val- ues, (2) an expressive output component for animating the virtual human’s facial muscle actions called Action Units (AUs), modeled following the Facial Action Coding Sys- tem (FACS). A large face repertoire of about 6000 faces arranged in PAD-space with respect to two dominance val- ues (dominant vs. submissive) is obtained as a result of the empirical study. Using the face repertoire an approach to- wards realizing facial mimicry for a virtual human based on backward mapping AUs displaying an emotional facial expression on PAD-values is outlined.}, address = {Amsterdam}, author = {Boukricha, Hana and Wachsmuth, Ipke and Hofstatter, A. and Grammer, Karl}, booktitle = {Interaction and Workshops of 3rd International Conference on Affective Computing and Intelligent ACII2009}, doi = {10.1109/ACII.2009.5349579}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Boukricha et al. 
- 2009 - Pleasure-arousal-dominance driven facial expression simulation.pdf:pdf}, isbn = {9781424447992}, pages = {1--7}, publisher = {IEEE}, title = {{Pleasure-arousal-dominance driven facial expression simulation}}, url = {http://ieeexplore.ieee.org/xpl/freeabs\_all.jsp?arnumber=5349579}, year = {2009} }

@inproceedings{McQuiggan2008,
  abstract = {Humans continuously assess one another's situational context, modify their own affective state, and then respond based on these outcomes through empathetic expression. Virtual agents should be capable of similarly empathizing with users in interactive environments. A key challenge posed by empathetic reasoning in virtual agents is determining whether to respond with parallel or reactive empathy. Parallel empathy refers to mere replication of another's affective state, whereas reactive empathy exhibits greater cognitive awareness and may lead to incongruent emotional responses (i.e., emotions different from the recipient's and perhaps intended to alter negative affect). This paper proposes a unified inductive framework for modeling parallel and reactive empathy. Empathy models are used to drive runtime situation-appropriate empathetic behaviors by selecting suitable parallel or reactive empathetic expressions.},
  address = {Estoril, Portugal},
  author = {McQuiggan, Scott W. and Robison, Jennifer and Phillips, Robert and Lester, James C.},
  booktitle = {Proceedings of the 7th International Joint Conference on Autonomous Agents and Multiagent Systems (AAMAS 2008)},
  editor = {Padgham, Lin and Parkes, David C. and M{\"u}ller, J{\"o}rg P. and Parsons, Simon},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/McQuiggan, Robison, Phillips - 2008 - Modeling parallel and reactive empathy in virtual agents An inductive approach.pdf:pdf},
  keywords = {Affective Reasoning,Empathy,Human-Computer Interaction,Intelligent Virtual Agents,Machine Learning},
  pages = {167--174},
  publisher = {International Foundation for Autonomous Agents and Multiagent Systems},
  title = {{Modeling parallel and reactive empathy in virtual agents: An inductive approach}},
  year = {2008}
}

@inproceedings{Wang2009, abstract = {How to build virtual agents that establish rapport with human? According to Tickle-Degnen and Rosenthal, the three essential components of rapport are mutual attentiveness, positivity and coordination. In our previous work, we designed an embodied virtual agent to establish rapport with a human speaker by providing rapid and contingent nonverbal feedback. How do we know that a human speaker is feeling a sense of rapport? In this paper, we focus on the positivity component of rapport by investigating the relationship of human speakers' facial expressions on the establishment of rapport. We used an automatic facial expression coding tool called CERT to analyze the human dyad interactions and human-virtual human interactions. Results show that recognizing positive facial displays alone may be insufficient and that recognized negative facial displays was more diagnostic in assessing the level of rapport between participants.}, address = {Amsterdam}, author = {Wang, Ning and Gratch, Jonathan}, booktitle = {3rd International Conference on Affective Computing and Intelligent Interaction and Workshops, 2009. 
ACII 2009}, doi = {10.1109/ACII.2009.5349514}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wang, Gratch - 2009 - Rapport and facial expression.pdf:pdf}, isbn = {978-1-4244-4800-5}, month = sep, pages = {1--6}, publisher = {IEEE}, title = {{Rapport and facial expression}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5349514}, year = {2009} } @article{Prendinger2005, abstract = {In this paper, we report on our efforts in developing affective character-based interfaces, i.e., interfaces that recognize and measure affective information of the user and address user affect by employing embodied characters. In particular, we describe the Empathic Companion, an ani- mated interface agent that accompanies the user in the setting of a virtual job interview. This inter- face application takes physiological data (skin conductance and electromyography) of a user in realtime, interprets them as emotions, and addresses the user’s affective states in the form of empathic feedback. The Empathic Companion is conceived as an educational agent that supports job seekers preparing for a job interview. We also present results from an exploratory study that aims to evaluate the impact of the Empathic Companion by measuring users’ skin conductance and heart rate. 
While an overall positive effect of the Empathic Companion could not be shown, the outcome of the experiment suggests that empathic feedback has a positive effect on the interviewee’s stress level while hearing the interviewer question.}, author = {Prendinger, Helmut and Ishizuka, M.}, doi = {10.1080/08839510590910174}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Prendinger - Unknown - The Empathic Companion A Character-based Interface that Addresses Users ’ Affective States.pdf:pdf;:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Prendinger, Ishizuka - 2005 - The Empathic Companion - A Character-based Interface that Addresses Users’ Affective States.pdf:pdf}, journal = {Applied Artificial Intelligence}, keywords = {electromyography,physiological signals,skin conductance}, number = {3-4}, pages = {267--286}, publisher = {Citeseer}, title = {{The Empathic Companion - A Character-based Interface that Addresses Users’ Affective States}}, volume = {19}, year = {2005} } @article{Albrecht2005, abstract = {We present an algorithm for generating facial expressions for a continuum of pure and mixed emotions of varying intensity. Based on the observation that in natural interaction among humans, shades of emotion are much more frequently encountered than expressions of basic emotions, a method to generate more than Ekman's six basic emotions (joy, anger, fear, sadness, disgust and surprise) is required. To this end, we have adapted the algorithm proposed by Tsapatsoulis et al. [1] to be applicable to a physics-based facial animation system and a single, integrated emotion model. A physics-based facial animation system was combined with an equally flexible and expressive text-to-speech synthesis system, based upon the same emotion model, to form a talking head capable of expressing non-basic emotions of varying intensities. 
With a variety of life-like intermediate facial expressions captured as snapshots from the system we demonstrate the appropriateness of our approach.}, author = {Albrecht, Irene and Schr\"{o}der, Marc and Haber, J\"{o}rg and Seidel, Hans-Peter}, doi = {10.1007/s10055-005-0153-5}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Albrecht et al. - 2005 - Mixed feelings expression of non-basic emotions in a muscle-based talking head.pdf:pdf}, issn = {1359-4338}, journal = {Virtual Reality}, keywords = {continuous emotions \ae emotional,speech,synthesis \ae facial animation}, month = aug, number = {4}, pages = {201--212}, title = {{Mixed feelings: expression of non-basic emotions in a muscle-based talking head}}, url = {http://www.springerlink.com/index/10.1007/s10055-005-0153-5}, volume = {8}, year = {2005} } @article{Biocca2002, abstract = {This paper outlines the foundation of a definition and measurement for the concept social presence. Justification for such a line of research lies in the ever-increasing use of social presence technologies and expansion of the social interactions across the Internet. A definition of social presence, based upon past literature and theory, describes several levels and dimensions of social presence by which the concept can be operationalized. Specifically, Level 1: co-presence is a necessary but not sufficient requirement for the sense of social presence. Level 2: the Subjective level, attempts to measure the psycho-behavioral accessibility of another interactant. Finally, Level 3: the Intersubjective level, assesses within and cross-interactant symmetry. 
The purposeful direction of this research and measurement construction is to enable researchers and designers to compare various mediated interactions as well as further theoretical inquiry.}, author = {Biocca, Frank and Harms, Chad}, journal = {Proceedings of PRESENCE}, keywords = {social presence,theory mind}, number = {517}, pages = {1--36}, publisher = {Citeseer}, title = {{Defining and measuring social presence: Contribution to the networked minds theory and measure}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.84.8350\&rep=rep1\&type=pdf}, volume = {2002}, year = {2002} } @article{Archer1977, author = {Archer, Dane and Akert, Robin M}, journal = {Journal of Personality and Social Psychology}, number = {6}, pages = {443--449}, title = {{Words and everything else: Verbal and nonverbal cues in social interpretation}}, volume = {35}, year = {1977} } @article{Fellner2012, abstract = {Individuals may differ in their ability to learn the significance of emotional cues within a specific context. If so, trait emotional intelligence (EI) may be associated with faster cue learning. This study (N = 180) tested whether trait EI predicts faster learning of a critical cue for discriminating ‘‘terrorists’’ from ‘‘non-terrorists’’, using virtual-reality heads as stimuli. The critical cue was either facial emotion (positive or negative), or a neutral feature (hat size). Cognitive ability and subjective state were also assessed. Par- ticipants were faster to learn with an emotive cue. Surprisingly, high trait EI was correlated with poorer performance, especially early in learning. Subjective distress was also associated with impaired learning to emotive cues. }, author = {Fellner, Angela N. and Matthews, Gerald and Shockley, Kevin D. and Warm, Joel S. and Zeidner, Moshe and Karlov, Lisa and Roberts, Richard D.}, doi = {10.1016/j.jrp.2012.01.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fellner et al. 
- 2012 - Using emotional cues in a discrimination learning task Effects of trait emotional intelligence and affective sta.pdf:pdf}, issn = {00926566}, journal = {Journal of Research in Personality}, keywords = {trait emotional intelligence}, month = jun, number = {3}, pages = {239--247}, publisher = {Elsevier Inc.}, title = {{Using emotional cues in a discrimination learning task: Effects of trait emotional intelligence and affective state}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0092656612000050}, volume = {46}, year = {2012} }

@inproceedings{Amini2014,
  address = {Miami, FL},
  author = {Amini, Reza and Lisetti, Christine and Yasavur, Ugan},
  booktitle = {9th International Conference on Design Science Research in Information Systems and Technology (DESRIST 2014)},
  series = {Lecture Notes in Computer Science},
  volume = {8463},
  editor = {Tremblay, Monica Chiarini and others},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Amini, Lisetti, Yasavur - 2014 - Emotionally Responsive Virtual Counselor for Behavior-Change Health Interventions.pdf:pdf},
  pages = {433--437},
  publisher = {Springer International Publishing Switzerland},
  title = {{Emotionally Responsive Virtual Counselor for Behavior-Change Health Interventions}},
  url = {http://link.springer.com/chapter/10.1007/978-3-319-06701-8\_40},
  year = {2014}
}

@inproceedings{Cairco2009, abstract = {Avari is a virtual receptionist for the Computer Science department at The University of North Carolina at Charlotte. Her components include background subtraction to detect a person’s presence, speech recognition, audio and visual devices to communicate with passersby. Deployed in a public setting, we investigate the reactions and interactions of passersby with Avari. 
We describe the design and architecture of the virtual human and discuss the effectiveness of a publicly deployed virtual human.}, address = {Clemson, SC, USA.}, author = {Cairco, Lauren and Hill, Rock and Wilson, Dale-marie and Fowler, Vicky and Leblanc, Morris}, booktitle = {48th ACM Southeast Conference (ACMSE'09)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cairco et al. - 2009 - AVARI Animated Virtual Agent Retrieving Information.pdf:pdf}, isbn = {9781605584218}, keywords = {human-centered,human-computer interaction,virtual humans}, pages = {1--6}, title = {{AVARI : Animated Virtual Agent Retrieving Information}}, year = {2009} } @article{Southard1918, author = {Southard, E E}, journal = {The Journal of Abnormal Psychology}, number = {4}, pages = {199}, publisher = {American Psychological Association}, title = {{The empathic index in the diagnosis of mental diseases.}}, volume = {13}, year = {1918} } @inproceedings{Neviarouskaya2007a, abstract = {In this paper, we focus on affect recognition from text in order to facilitate sensitive and expressive communication in computer-mediated environments. Our model for analyzing affect conveyed by text is tailored to handle the style and specifics of informal online conversations. The motivation behind our approach is to improve social interactivity and emotional expressiveness of real-time messaging. In order to estimate affect in text, our model processes symbolic cues, such as emoticons, detects and transforms abbreviations, and employs natural language processing techniques for word/phrase/sentence-level analysis, e.g. by considering relations among words in a sentence. As a result of the analysis, text can be categorized into emotional states and communicative functions. A designed graphical repre- sentation of a user (avatar) displays emotions and social behaviour driven by text and performs natural idle move- ments. 
The proposed system shows promising results on affect recognition in real examples of online conversation.}, address = {Honolulu, Hawaii, USA}, author = {Neviarouskaya, Alena and Prendinger, Helmut and Ishizuka, Mitsuru}, booktitle = {Proceedings of the 12th international conference on Intelligent user interfaces - IUI '07}, doi = {10.1145/1216295.1216346}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Neviarouskaya, Prendinger, Ishizuka - 2007 - Analysis of affect expressed through the evolving language of online communication.pdf:pdf}, isbn = {1595934812}, keywords = {Affective computing,affective user interface,avatar,emotions,online communication}, pages = {278--281}, publisher = {ACM Press}, title = {{Analysis of affect expressed through the evolving language of online communication}}, url = {http://portal.acm.org/citation.cfm?doid=1216295.1216346}, year = {2007} }

@incollection{C.1959,
  address = {New York, USA},
  author = {Rogers, Carl R.},
  booktitle = {Psychology: A Study of a Science},
  volume = {3},
  editor = {Koch, Sigmund},
  pages = {184--256},
  publisher = {McGraw-Hill},
  title = {{A Theory of Therapy, Personality and Interpersonal Relationships as Developed in the Client-Centered Framework}},
  year = {1959}
}

@book{Ekman2002,
  address = {Salt Lake City, UT},
  author = {Ekman, Paul and Friesen, Wallace V. and Hager, Joseph C.},
  edition = {2nd},
  isbn = {0931835011},
  publisher = {Research Nexus eBook},
  title = {{Facial Action Coding System}},
  year = {2002}
}

@incollection{Hand2008,
  author = {Hand, Stacey and Varan, Duane},
  booktitle = {Changing Television Environments},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hand, Varan - 2008 - Interactive Narratives Exploring the Links between Empathy, Interactivity and Structure.pdf:pdf},
  pages = {11--19},
  publisher = {Springer},
  title = {{Interactive Narratives: Exploring the Links between Empathy, Interactivity and Structure}},
  url = {http://www.springerlink.com/index/15385726247gu863.pdf},
  year = {2008}
}

@inproceedings{Mattheij2013,
  author = {Mattheij, Ruud and Nilsenova, Marie and Postma, Eric},
  booktitle = {2013 Humaine Association Conference on Affective Computing and Intelligent Interaction},
  doi = {10.1109/ACII.2013.152},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mattheij, Nilsenova, Postma - 2013 - Vocal and Facial Imitation of Humans Interacting with Virtual Agents.pdf:pdf},
  isbn = {978-0-7695-5048-0},
  month = sep,
  pages = {815--820},
  publisher = {IEEE},
  title = {{Vocal and Facial Imitation of Humans Interacting with Virtual Agents}},
  url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6681544},
  year = {2013}
}

@article{Tickle-Degnen1990, abstract = {The purpose of this article is to offer a conceptualization of rapport that has utility for identifying the nonverbal correlates associated with rapport. We describe the nature of rapport in terms of a dynamic structure of three interrelating components: mutual attentiveness, positivity, and coordination. 
We propose that the relative weighting of these components in the experience of rapport changes over the course of a developing relationship between individuals. In early interactions, positivity and attentiveness are more heavily weighted than coordination, whereas in later interactions, coordination and attentiveness are the more heavily weighted components. Because of the gestalt nature of the experience of rapport, it is not easy to identifi nonverbal behavioral correlates of the components. We discuss two approaches to nonverbal measurement, molecular and molar, along with recommendations for their appropriate application in the study of rapport at different stages of an interpersonal relationship. We present a meta-analytic study that demon- strates the effect of nonverbal behavior, measured at the molecular level, on the positivity component of rapport, and we conclude with an outline of hypotheses relevant to the investigation of the nonverbal correlates of rapport.}, author = {Tickle-Degnen, L. and Rosenthal, Robert}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Tickle-Degnen, Rosenthal - 1990 - The nature of rapport and its nonverbal correlates.pdf:pdf}, journal = {Psychological Inquiry}, number = {4}, pages = {285--293}, publisher = {Taylor \& Francis}, title = {{The nature of rapport and its nonverbal correlates}}, url = {http://www.tandfonline.com/doi/abs/10.1207/s15327965pli0104\_1}, volume = {1}, year = {1990} } @article{Zeng2009, abstract = {Automated analysis of human affective behavior has attracted increasing attention from researchers in psychology, computer science, linguistics, neuroscience, and related disciplines. However, the existing methods typically handle only deliberately displayed and exaggerated expressions of prototypical emotions despite the fact that deliberate behaviour differs in visual appearance, audio profile, and timing from spontaneously occurring behaviour. 
To address this problem, efforts to develop algorithms that can process naturally occurring human affective behaviour have recently emerged. Moreover, an increasing number of efforts are reported toward multimodal fusion for human affect analysis including audiovisual fusion, linguistic and paralinguistic fusion, and multi-cue visual fusion based on facial expressions, head movements, and body gestures. This paper introduces and surveys these recent advances. We first discuss human emotion perception from a psychological perspective. Next we examine available approaches to solving the problem of machine understanding of human affective behavior, and discuss important issues like the collection and availability of training and test data. We finally outline some of the scientific and engineering challenges to advancing human affect sensing technology.}, author = {Zeng, Zhihong and Pantic, Maja and Roisman, Glenn I and Huang, Thomas S.}, doi = {10.1109/TPAMI.2008.52}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zeng et al. - 2009 - A survey of affect recognition methods audio, visual, and spontaneous expressions(2).pdf:pdf}, isbn = {9781595938176}, issn = {0162-8828}, journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, keywords = {Affect,Affect: physiology,Algorithms,Artificial Intelligence,Automated,Automated: methods,Emotions,Emotions: physiology,Facial Expression,Monitoring,Pattern Recognition,Physiologic,Physiologic: methods,Sound Spectrography,Sound Spectrography: methods}, month = jan, number = {1}, pages = {39--58}, pmid = {19029545}, title = {{A survey of affect recognition methods: audio, visual, and spontaneous expressions.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19029545}, volume = {31}, year = {2009} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @misc{Ipark.hud.ac.uk2013, author = {Ipark.hud.ac.uk}, title = {{Screencasting | Teaching and Learning Innovation Park}}, url = {Ipark.hud.ac.uk}, urldate = {2013-12-17}, year = {2013} } @article{Gratch2007a, author = {Gratch, Jonathan and Wang, Ning and Gerten, Jillian and Fast, Edward}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch et al. - 2007 - Creating rapport with virtual agents.pdf:pdf}, journal = {Intelligent Virtual Agents}, keywords = {evaluation,rapport,virtual agents}, title = {{Creating rapport with virtual agents}}, url = {http://www.springerlink.com/index/X568357400058UM7.pdf}, year = {2007} } @book{Goleman1995, author = {Goleman, D.}, publisher = {Bantam Books}, title = {{Emotional Intelligence}}, year = {1995} } @article{Wierzbicki1993, abstract = {A meta-analysis was conducted of 125 studies on psychotherapy dropout. Mean dropout rate was 46.86\%. Dropout rate was unrelated to most of the variables that were examined but differed significantly as a function of definition of dropout. Lower dropout rates occurred when dropout was defined by termination because of failure to attend a scheduled session than by either therapist judgment or number of sessions attended. 
Significant effect sizes were observed for 3 client demographic variables: racial status, education, and income. Dropout rates increased for African-American (and other minority), less-educated, and lower income groups. Recommendations for future psychotherapy dropout research are presented.}, author = {Wierzbicki, Michael and Pekarik, Gene}, doi = {10.1037/0735-7028.24.2.190}, isbn = {0735702807357028}, issn = {07357028}, journal = {Professional Psychology: Research and Practice}, number = {2}, pages = {190--195}, publisher = {American Psychological Association}, title = {{A meta-analysis of psychotherapy dropout.}}, url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0735-7028.24.2.190}, volume = {24}, year = {1993} } @article{Szymanski2012, abstract = {Psycholinguistic theories of semantic memory form the basis of understanding of natural language concepts. These theories are used here as an inspiration for implementing a computational model of semantic memory in the form of semantic network. Combining this network with a vector-based object-relation-feature value representation of concepts that includes also weights for confidence and sup- port, allows for recognition of concepts by referring to their features, enabling a semantic search algorithm. This algorithm has been used for word games, in particular the 20-question game in which the program tries to guess a concept that a human player thinks about. The game facilitates lexical knowledge validation and acquisition through the interaction with humans via supervised dialog templates. The elementary linguistic competencies of the proposed model have been evaluated assessing how well it can represent the meaning of lin- guistic concepts. To study properties of information retrieval based on this type of semantic representation in contexts derived from on-going dialogs experiments in limited domains have been performed. 
Several similarity measures have been used to compare the com- pleteness of knowledge retrieved automatically and corrected through active dialogs to a “golden standard”. Comparison of semantic search with human performance has been made in a series of 20-question games. On average results achieved by human players were better than those obtained by semantic search, but not by a wide margin.}, author = {Szymański, Julian and Duch, Wlodzislaw}, doi = {10.1016/j.cogsys.2011.02.002}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Szymański, Duch - 2012 - Information retrieval with semantic memory model.pdf:pdf}, issn = {13890417}, journal = {Cognitive Systems Research}, keywords = {corresponding author at,department of informatics,leading to low precision,nicolaus,that is returning}, month = apr, number = {1}, pages = {84--100}, title = {{Information retrieval with semantic memory model}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1389041711000179}, volume = {14}, year = {2012} } @inproceedings{Kumano2011, author = {Kumano, Shiro and Otsuka, Kazuhiro and Mikami, Dan and Yamato, Junji}, booktitle = {Automatic Face \& Gesture Recognition and Workshops (FG 2011), 2011 IEEE International Conference on}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kumano et al. 
- 2011 - Analyzing empathetic interactions based on the probabilistic modeling of the co-occurrence patterns of facial exp.pdf:pdf}, pages = {43--50}, publisher = {IEEE}, title = {{Analyzing empathetic interactions based on the probabilistic modeling of the co-occurrence patterns of facial expressions in group meetings}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5771440}, year = {2011} } @inproceedings{Lisetti2012a, abstract = {We discuss the design and implementation of the prototype of an avatar-based health system aimed at providing people access to an effective behavior change intervention which can help them to find and cultivate motivation to change unhealthy lifestyles. An empathic Embodied Conversational Agent (ECA) delivers the intervention. The health dialog is directed by a computational model of Motivational Interviewing, a novel effective face-to-face patient-centered counseling style which respects an individual’s pace toward behavior change. Although conducted on a small sample size, results of a preliminary user study to asses users’ acceptance of the avatar counselor indicate that the system prototype is well accepted by 75\% of users.}, address = {Miami, FL, US}, author = {Lisetti, Christine L and Yasavur, Ugan and Leon, Claudia De and Amini, Reza and Rishe, Naphtali}, booktitle = {Proceeding of FLAIRS'2012 Association for the Advancement of Artificial Intelligence (www.aaai.org)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lisetti et al. - 2012 - Building an On-demand Avatar-based Health Intervention for Behavior Change.pdf:pdf}, number = {Mi}, title = {{Building an On-demand Avatar-based Health Intervention for Behavior Change}}, year = {2012} } @inproceedings{Nor2010, author = {Nor, R.M. 
and Muhlberger, Ralf}, booktitle = {2010 International Conference on User Science and Engineering (i-USEr)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nor, Muhlberger - 2010 - Designing to support empathy Understanding user experience by using a model of interaction in meeting human nee.pdf:pdf}, isbn = {9781424490493}, keywords = {-component,community,empathy,emphatic communication,user experience}, pages = {7--10}, publisher = {IEEE}, title = {{Designing to support empathy: Understanding user experience by using a model of interaction in meeting human needs}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5716713}, year = {2010} } @book{Reynolds2000, address = {Burlington, VT}, author = {Reynolds, W. J.}, publisher = {Ashgate}, title = {{The measurement and development of empathy in nursing}}, year = {2000} } @article{O'Brien2008a, author = {O'Brien, Heather L. and Toms, Elaine G.}, doi = {10.1002/asi.20801}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/O'Brien, Toms - 2008 - What is user engagement A conceptual framework for defining user engagement with technology.pdf:pdf}, journal = {Journal of the American Society for Information Science and Technology}, number = {6}, pages = {938--955}, title = {{What is user engagement? A conceptual framework for defining user engagement with technology}}, url = {http://onlinelibrary.wiley.com/doi/10.1002/asi.20801/full}, volume = {59}, year = {2008} } @article{Wagner1993, author = {Wagner, H. L. and Buck, R. 
and Winterbotham, M.}, journal = {Journal of Nonverbal Behavior}, pages = {29--53}, title = {{Communication of specific emotions: Gender differences in sending accuracy and communication measures}}, volume = {17}, year = {1993} } @article{Lafrance1979, author = {LaFrance, Marianne}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lafrance - 1979 - Nonverbal Synchrony Panel Technique Analysis by the Cross-Lag and Rapport.pdf:pdf}, journal = {Social Psychology}, number = {1}, pages = {66--70}, title = {{Nonverbal Synchrony and Rapport: Analysis by the Cross-Lag Panel Technique}}, volume = {42}, year = {1979} } @inproceedings{DMello2006, author = {D'Mello, Sidney and Graesser, Arthur C.}, booktitle = {Intelligent Virtual Agents}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mello, Graesser - 2006 - Affect detection from human-computer dialogue with an intelligent tutoring system.pdf:pdf}, pages = {54--67}, title = {{Affect detection from human-computer dialogue with an intelligent tutoring system}}, url = {http://www.springerlink.com/index/b574kpu6nl719408.pdf}, year = {2006} } @article{Carr2003, abstract = {How do we empathize with others? A mechanism according to which action representation modulates emotional activity may provide an essential functional architecture for empathy. The superior temporal and inferior frontal cortices are critical areas for action representation and are connected to the limbic system via the insula. Thus, the insula may be a critical relay from action representation to emotion. We used functional MRI while subjects were either imitating or simply observing emotional facial expressions. Imitation and observation of emotions activated a largely similar network of brain areas. 
Within this network, there was greater activity during imitation, compared with observation of emotions, in premotor areas including the inferior frontal cortex, as well as in the superior temporal cortex, insula, and amygdala. We understand what others feel by a mechanism of action representation that allows empathy and modulates our emotional content. The insula plays a fundamental role in this mechanism.}, author = {Carr, Laurie and Iacoboni, Marco and Dubeau, Marie-Charlotte and Mazziotta, John C and Lenzi, Gian Luigi}, doi = {10.1073/pnas.0935845100}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Carr et al. - 2003 - Neural mechanisms of empathy in humans a relay from neural systems for imitation to limbic areas.pdf:pdf}, isbn = {0935845100}, issn = {0027-8424}, journal = {Proceedings of the National Academy of Sciences of the United States of America}, keywords = {Adult,Behavior,Empathy,Female,Humans,Limbic System,Limbic System: physiology,Magnetic Resonance Imaging,Male}, month = may, number = {9}, pages = {5497--502}, pmid = {12682281}, title = {{Neural mechanisms of empathy in humans: a relay from neural systems for imitation to limbic areas.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=154373\&tool=pmcentrez\&rendertype=abstract}, volume = {100}, year = {2003} } @incollection{Gunes2008, author = {Gunes, Hatice and Piccardi, Massimo and Pantic, Maja}, booktitle = {Affective Computing: Focus on Emotion Expression, Synthesis, and Recognition}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gunes, Piccardi, Pantic - 2008 - From the Lab to the Real World Affect Recognition Using Multiple Cues and Modalities.pdf:pdf}, pages = {185--218}, title = {{From the Lab to the Real World: Affect Recognition Using Multiple Cues and Modalities}}, year = {2008} } @incollection{Stueber2008, author = {Stueber, Karsten}, booktitle = {The Stanford Encyclopedia 
of Philosophy}, edition = {Fall 2008}, editor = {Zalta, Edward N.}, title = {{Empathy}}, url = {http://plato.stanford.edu/archives/fall2008/entries/empathy/}, year = {2008} } @phdthesis{Jacques1996, author = {Jacques, R.D.}, pages = {103}, school = {South Bank University}, title = {{The Nature of Engagement and its Role in Hypermedia Evaluation and Design}}, type = {PhD Dissertation}, year = {1996} } DUPLICATE (disabled; identical entry with the same key appears earlier in this file, and BibTeX rejects repeated keys) misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @inproceedings{Kenny2007, abstract = {Virtual humans offer an exciting and powerful potential for rich interactive experiences. Fully embodied virtual humans are growing in capability, ease, and utility. As a result, they present an opportunity for expanding research into burgeoning virtual patient medical applications. In this paper we consider the ways in which one may go about building and applying virtual human technology to the virtual patient domain. Specifically we aim to show that virtual human technology may be used to help develop the interviewing and diagnostics skills of developing clinicians. 
Herein we proffer a description of our iterative design process and preliminary results to show that virtual patients may be a useful adjunct to psychotherapy education.}, author = {Kenny, Patrick and Parsons, T and Gratch, Jonathan and Leuski, Anton and Rizzo, A}, booktitle = {Intelligent Virtual Agents (IVA'07)}, editor = {et Al., C. Pelachaud}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kenny et al. - 2007 - Virtual patients for clinical therapist skills training.pdf:pdf}, keywords = {psychopathology,virtual humans,virtual patients}, pages = {197--210}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Virtual patients for clinical therapist skills training}}, url = {http://www.springerlink.com/index/J76642WV61N3R017.pdf}, year = {2007} } @article{Cassell2001b, abstract = {This paper addresses the issue of designing embodied conversational agents that exhibit appropriate posture shifts during dialogues with human users. Previous research has noted the importance of hand gestures, eye gaze and head nods in conversations between embodied agents and humans. We present an analysis of human monologues and dialogues that suggests that postural shifts can be predicted as a function of discourse state in monologues, and discourse and conversation state in dialogues. 
On the basis of these findings, we have implemented an embodied conversational agent that uses Collagen in such a way as to generate postural shifts.}, author = {Cassell, Justine and Nakano, Yukiko I and Bickmore, Timothy Wallace and Sidner, Candace L and Rich, Charles}, doi = {10.3115/1073012.1073028}, institution = {Association for Computational Linguistics Morristown, NJ, USA}, journal = {Proceedings of the 39th Annual Meeting on Association for Computational Linguistics ACL 01}, pages = {114--123}, publisher = {Association for Computational Linguistics}, series = {ACL '01}, title = {{Non-verbal cues for discourse structure}}, url = {http://portal.acm.org/citation.cfm?doid=1073012.1073028}, year = {2001} } @article{Larimer2009, abstract = {It is well established that college students have high rates of alcohol use and misuse and suffer the negative consequences of this behavior. Research evaluating the results of brief interventions with high-risk college students has shown these approaches to be successful in reducing alcohol con- sumption and/or related consequences. Several screening tools have been developed to detect the presence of problematic alcohol use and associated disorders, and some are designed specifically for use in a college student population. College campuses offer several opportunities to implement screening and interventions, including universal or large-scale assessments; health services, counsel- ing centers, or local emergency rooms; or via established judicial or grievance systems set up to deal with students who violate campus alcohol policies. Issues to consider when implementing screening and brief interventions in college populations include who should deliver the interventions—peer or professional counselors—and how students should be encouraged to participate in the interventions. 
Regardless of how the measures are implemented, the content and process of the brief interventions should be based on the available scientific evidence regarding established efficacious interventions.}, author = {Larimer, Mary E and Cronce, Jessica M and Lee, Christine M and Kilmer, Jason R}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Larimer et al. - 2004 - Brief Intervention in College Settings.pdf:pdf}, journal = {Alcohol Research \& Health}, keywords = {AODD (alcohol and other drug use disorder),CAGE Questionnaire,Michigan Alcoholism Screening Test (MAST),Young Adult Alcohol Problems Screening Test (YAAPS,alcohol abuse,binge drinking,brief intervention,heavy drinking,identification and screening,interview,literature review,motivational interviewing,peer counseling,professional counseling,undergraduate student}, pages = {94--104}, title = {{Brief Intervention in College Settings}}, url = {?http://pubs.niaaa.nih.gov/publications/arh28 ?2/94?104 .htm}, volume = {28}, year = {2004} } @inproceedings{Kashyap2012, abstract = {Earlier works on personalized Web search focused on the click- through graphs, while recent works leverage social annotations, which are often unavailable. On the other hand, many users are members of the social networks and subscribe to social groups. Intuitively, users in the same group may have similar relevance judgments for queries related to these groups. SonetRank utilizes this observation to personalize the Web search results based on the aggregate relevance feedback of the users in similar groups. SonetRank builds and maintains a rich graph-based model, termed Social Aware Search Graph, consisting of groups, users, queries and results click-through information. 
SonetRank’s personalization scheme learns in a principled way to leverage the following three signals, of decreasing strength: the personal document preferences of the user, of the users of her social groups relevant to the query, and of the other users in the network. SonetRank also uses a novel approach to measure the amount of personalization with respect to a user and a query, based on the query-specific richness of the user’s social profile. We evaluate SonetRank with users on Amazon Mechanical Turk and show a significant improvement in ranking compared to state-of-the-art techniques.}, address = {Maui, HI, USA}, author = {Kashyap, Abhijith and Amini, Reza and Hristidis, Vagelis}, booktitle = {ACM 21st Conference on Information and Knowledge Management CIKM 2012}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kashyap, Amini, Hristidis - 2012 - SonetRank Leveraging Social Networks to Personalize Search.pdf:pdf}, isbn = {9781450311564}, keywords = {Results Re-ranking.,Search Personalization,Social Search}, publisher = {ACM}, title = {{SonetRank : Leveraging Social Networks to Personalize Search}}, year = {2012} } @article{Cowie2001, author = {Cowie, R. and Douglas-Cowie, E. and Tsapatsoulis, N. and Votsis, G. and Kollias, S. and Fellenz, W. and Taylor, J. G.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cowie et al. 
- 2001 - Emotion Recognition in Human-Computer Interaction.pdf:pdf}, journal = {IEEE Signal Processing Magazine}, month = jan, number = {1}, pages = {32--80}, title = {{Emotion Recognition in Human-Computer Interaction}}, volume = {18}, year = {2001} } @incollection{WeinerBGraham1984, address = {New York}, author = {Weiner, Bernard and Graham, Sandra}, booktitle = {Emotions, cognition, and behavior}, editor = {Izard, Carroll E and Kagan, J and Zajonc, Robert B}, pages = {167--191}, publisher = {Cambridge University Press}, title = {{An attributional approach to emotional development}}, year = {1984} } @inproceedings{Kang2009, address = {Marriott, Chicago, IL}, author = {Kang, Sin-hwa and Watt, James H and Gratch, Jonathan}, booktitle = {Paper presented at the annual meeting of the International Communication Association}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kang, Watt, Gratch - 2009 - Associations between Interactants ’ ’ Personality Traits and Their Feelings of Rapport in Interactions with.pdf:pdf}, pages = {1--25}, title = {{Associations between Interactants' Personality Traits and Their Feelings of Rapport in Interactions with Virtual Humans}}, year = {2009} } @article{Orozco2010, author = {Orozco, H. 
and Thalmann, Daniel and Ramos, F.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Orozco, Thalmann, Ramos - 2010 - Making empathetic virtual humans in human–computer interaction scenarios.pdf:pdf}, journal = {Proceedings of 11th Computer Graphics International, CGI}, title = {{Making empathetic virtual humans in human–computer interaction scenarios}}, url = {http://cgi2010.miralab.unige.ch/short/SP09/SP09.pdf}, volume = {10}, year = {2010} } @book{Spencer1870, address = {London}, author = {Spencer, H.}, publisher = {Williams and Norgate}, title = {{The principles of psychology}}, year = {1870} } @inproceedings{Cavazza2010a, abstract = {We demonstrate a “Companion” ECA, which is able to provide advice and support to the user, taking into account emotions expressed by her through dialogue. The integration of all required multimodal I/O components is based on interaction strategies defining the shape of dialogue, on the ECA’s response times, and on the underlying affective strategy. The system supports free conversation on an everyday life scenario in which the user comments her day at the office.}, address = {Toronto, Canada,}, author = {Cavazza, Marc and Vargas, C Emilio}, booktitle = {Proc. of and Multiagent 9th Int. Systems Conf. on Autonomous Agents (AAMAS 2010)}, editor = {van der Hoek, Kaminka and Lesp\'{e}rance, Luck and Sen}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cavazza, Vargas - 2010 - How Was Your Day A Companion ECA.pdf:pdf}, keywords = {Affective Interfaces,Embodied Conversational Agents,Human- Computer Dialogue.}, number = {Aamas}, pages = {1629--1630}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{How Was Your Day ? A Companion ECA}}, year = {2010} } @book{Scherer2001, address = {New York, NY, US}, author = {Scherer, Klaus R.}, editor = {{Scherer, Klaus R. 
and Schorr, Angela and Johnstone}, Tom}, isbn = {0-19-513007-3}, keywords = {appraisal,cognitive-motivational-relational theory of emotio,coping,emotions,psychological stress,theory development}, pages = {478}, publisher = {Oxford University Press}, title = {{Appraisal processes in emotion: Theory, methods, research}}, year = {2001} } @article{Mignault2003, abstract = {Ased on the premise that human head tilt is homologous to animal dominance displays, we hypothesized that when a head is bowed, the face should be perceived as submissive, sad, displaying inferiority emotions (i.e., shame, embarrassment, guilt, humiliation, and respect) and, paradoxically, as contracting the zygomatic major muscle. Conversely, a raised head should be perceived as more dominant and displaying greater superiority emotions (i.e., contempt and pride). We conducted two experiments showing 3-D models of faces to 64 participants. The results confirmed our hypotheses and also showed that a raised head connotes happiness. In addition, we found a significant influence of the actors' sex on participants' perception, such as a bias towards perceiving stronger upward contraction of the mouth in female than male actors when the head is tilted. We discuss these findings within the context of evolution and social behavior.}, author = {Mignault, Alain and Chaudhuri, Avi}, doi = {10.1023/A:1023914509763}, issn = {01915886}, journal = {Journal of Nonverbal Behavior}, number = {2}, pages = {111--132}, publisher = {Springer}, title = {{THE MANY FACES OF A NEUTRAL FACE : HEAD TILT AND PERCEPTION OF DOMINANCE AND EMOTION}}, url = {http://dx.doi.org/10.1023/A:1023914509763}, volume = {27}, year = {2003} } @article{Kim2004, abstract = {The present study attempted to develop new scales of patient-perceived, empathy-related constructs and to test a model of the relationships of physician empathy and related constructs to patient satisfaction and compliance. 
Five hundred fifty outpatients at a large university hospital in Korea were interviewed with the questionnaire. The data were analyzed using structural equation modeling. Patient-perceived physician empathy significantly influenced patient satisfaction and compliance via the mediating factors of information exchange, perceived expertise, inter-personal trust, and partnership. Improving physician empathic communication skills should increase patient satisfaction and compliance. Health providers who wish to improve patient satisfaction and compliance should first identify components of their empathic communication needing improvement and then try to refine their skills to better serve patients.}, author = {Kim, Sung Soo and Kaplowitz, Stan and Johnston, Mark V}, issn = {01632787}, journal = {Evaluation \& the Health Professions}, keywords = {communication,empathy,humans,korea,patient compliance,patient satisfaction,physician patient relations,questionnaires}, number = {3}, pages = {237--251}, title = {{The effects of physician empathy on patient satisfaction and compliance.}}, url = {http://ehp.sagepub.com/content/27/3/237.short}, volume = {27}, year = {2004} } @article{Speedling1985, abstract = {In this paper, the authors argue that patient satisfaction is an insufficient measure of the quality of the doctor-patient relationship. While shown to have a salutary effect on patient anxiety concerning illness and treatment, the only other significant outcome associated with levels of satisfaction is utilization behavior. This is not surprising, the authors argue, since prevailing conceptualizations of patient satisfaction fail to incorporate measures of patient participation in the therapeutic process. Evidence suggests that by encouraging patients to take an active role in their health care physicians can increase the effectiveness of their therapeutic activities. 
A method for involving patients is through incorporating their preferences into the physician's decision-making processes. An example of physician decision making which incorporates patient preferences is provided.}, author = {Speedling, E. J. and Rose, D. N.}, journal = {Social Science \& Medicine}, keywords = {professional patient relationship}, number = {2}, pages = {115--120}, pmid = {4048997}, title = {{Building an effective doctor-patient relationship: from patient satisfaction to patient participation.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/4048997}, volume = {21}, year = {1985} } @article{Poh2010, author = {Poh, M. Z. and McDuff, D. J.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Poh, McDuff - 2010 - Non-contact, automated cardiac pulse measurements using video imaging and blind source separation.pdf:pdf}, journal = {Optics Express}, number = {10}, pages = {10762--10774}, title = {{Non-contact, automated cardiac pulse measurements using video imaging and blind source separation}}, volume = {18}, year = {2010} } @inproceedings{Mulder2004, abstract = {In this paper we report about our research towards the use of affect in language wherein we have attempted to formalise the affective functionality at word and grammatical level for a fraction of Dutch and English. These formalisations have been demonstrated in a pilot experiment. 
The empirical background of the formalisation, and the results of the experiment constitute the basis for further research on a lexical, grammatical implementation of affect.}, author = {Mulder, Matthijs and Nijholt, Anton and {Den Uyl}, Marten and Terpstra, Peter}, booktitle = {Proceedings of the 7th International Conference on Text, Speech and Dialogue (TSD 2004)}, pages = {171--178}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, title = {{A Lexical Grammatical Implementation of Affect}}, url = {http://wwwhome.cs.utwente.nl/~anijholt/artikelen/tsd2004.pdf}, volume = {3206}, year = {2004} } @incollection{Preston2007, author = {Preston, S. D.}, booktitle = {Empathy in mental illness}, chapter = {23}, editor = {Farrow, T. and Woodruff, P.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Preston - 2007 - A perception-action model for empathy.pdf:pdf}, isbn = {0521847346}, pages = {428--446}, publisher = {Cambridge University Press}, title = {{A perception-action model for empathy}}, url = {http://www-personal.umich.edu/~prestos/Downloads/Preston2007\_MI.pdf}, year = {2007} } @article{Tian2001, author = {Tian, Y.-L. and Kanade, T. and Cohn, J. F.}, doi = {10.1109/34.908962}, file = {::}, issn = {01628828}, journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, number = {2}, pages = {97--115}, title = {{Recognizing action units for facial expression analysis}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=908962}, volume = {23}, year = {2001} } @article{DiClemente2001, abstract = {OBJECTIVE: To offer a taxonomy of types of feedback and describe potential mechanisms of action particularly in the area of addictive behaviors. METHOD: Reviewed the literature to examine support for types---Generic, Targeted, and Personalized---and for mechanisms of feedback. 
RESULTS: Although it is not clear how it works, feedback is thought to offer important information, to create a sense of caring and helping relationship, to reach more directly decisional considerations, to increase engagement in the materials, to increase motivation, or to provide social comparison and norms. CONCLUSIONS: Avenues for future research in search of the most effective manner of using feedback to promote health behavior change are discussed.}, author = {DiClemente, C C and Marinilli, A S and Singh, M and Bellino, L E}, institution = {Psychology Department, University of Maryland, Baltimore County, Baltimore 21250, USA. diclemen@umbc.edu}, journal = {American Journal of Health Behavior}, keywords = {addictive,addictive prevention \& control,addictive psychology,behavior,classification,feedback,health behavior,health education,health education classification,humans,mass screening,models,psychological,risk taking}, number = {3}, pages = {217--227}, pmid = {11322620}, publisher = {PNG Publications}, title = {{The role of feedback in the process of health behavior change.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11322620}, volume = {25}, year = {2001} } @article{Cassell1999, abstract = {In this article we describe results froman experiment of user interaction with autonomous , human - like ( humanoid ) conversational agents . We hypothesize that for embodied conversational agents , nonverbal behaviors related to the process of conversation , what we call envelope feedback, is much more important than other feedback , such as emotional expression . We test this hypothesis by having subjects interact with three autonomous agents , all capable of full - duplex multimodal interaction: able to generate and recognize speech , intonation , facial displays , and gesture . Each agent , however , gave a different kind of feedback: ( 1 ) content - related only , ( 2 ) content + envelope feedback , and ( 3 ) content + emotional . 
Content-related feedback includes answering questions and executing commands; envelope feedback includes behaviors such as gaze, manual beat gesture, and head movements; emotional feedback includes smiles and looks of puzzlement. Subjects' evaluations of the system were collected with a questionnaire, and videotapes of their speech patterns and behaviors were scored according to how often the users repeated themselves, how often they hesitated, and how often they got frustrated. The results confirm our hypothesis that envelope feedback is more important in interaction than emotional feedback and that envelope feedback plays a crucial role in supporting the process of dialog. A secondary result from this study shows that users give our multimodal conversational humanoids very high ratings of lifelikeness and fluidity of interaction when the agents are capable of giving such feedback.}, author = {Cassell, Justine and Thorisson, K. R.}, doi = {10.1080/088395199117360}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cassell, Thorisson - 1999 - The power of a nod and a glance Envelope vs. emotional feedback in animated conversational agents.pdf:pdf}, journal = {Applied Artificial Intelligence}, number = {4-5}, pages = {519--538}, publisher = {Taylor \& Francis}, title = {{The power of a nod and a glance: Envelope vs. emotional feedback in animated conversational agents}}, url = {http://www.tandfonline.com/doi/abs/10.1080/088395199117360}, volume = {13}, year = {1999} } @article{Fehr1984, author = {Fehr, B. and Russell, James A.}, journal = {Journal of Experimental Psychology: General}, pages = {464--486}, title = {{Concept of emotion viewed from a prototype perspective}}, volume = {113}, year = {1984} } @inproceedings{Bartlett2004, author = {Bartlett, M.S. and Littlewort, G. and Lainscsek, C. and Fasel, I. and Movellan, J.}, booktitle = {2004 IEEE International Conference on Systems, Man and Cybernetics (IEEE Cat. 
No.04CH37583)}, doi = {10.1109/ICSMC.2004.1398364}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bartlett et al. - 2004 - Machine learning methods for fully automatic recognition of facial expressions and facial actions.pdf:pdf}, isbn = {0-7803-8567-5}, keywords = {real-time face detection,adaboost,facial action coding,facial expression recognition,feature selection,linear discriminant analysis,machine learning,support vector machines}, pages = {592--597}, publisher = {IEEE}, title = {{Machine learning methods for fully automatic recognition of facial expressions and facial actions}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1398364}, volume = {1}, year = {2004} } @book{Izard1977, address = {New York}, author = {Izard, Carroll Ellis}, editor = {Izard, Carroll Ellis}, isbn = {9780306309861}, pages = {495}, publisher = {Plenum Press}, title = {{Human Emotions}}, year = {1977} } @article{Joshi2013, author = {Joshi, Dipti D. and Zalte, M. B.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Joshi, Zalte - 2013 - Speech Emotion Recognition A Review.pdf:pdf}, journal = {IOSR Journal of Electronics and Communication Engineering}, keywords = {classifier,emotion recognition,feature extraction,feature selection}, number = {4}, pages = {34--37}, title = {{Speech Emotion Recognition: A Review}}, volume = {4}, year = {2013} } @article{Becker-Asano2009, abstract = {We introduce the WASABI ([W]ASABI [A]ffect [S]imulation for [A]gents with [B]elievable [I]nteractivity) Affect Simulation Architecture, in which a virtual human's cognitive reasoning capabilities are combined with simulated embodiment to achieve the simulation of primary and secondary emotions. 
In modeling primary emotions we follow the idea of “Core Affect” in combination with a continuous progression of bodily feeling in three-dimensional emotion space (PADspace), that is subsequently categorized into discrete emotions. In humans, primary emotions are understood as onto-genetically earlier emotions, which directly influence facial expressions. Secondary emotions, in contrast, afford the abil- ity to reason about current events in the light of experiences and expectations.}, author = {Becker-Asano, Christian and Wachsmuth, Ipke}, doi = {10.1007/s10458-009-9094-9}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Becker-Asano, Wachsmuth - 2009 - Affective computing with primary and secondary emotions in a virtual human.pdf:pdf}, issn = {1387-2532}, journal = {Autonomous Agents and Multi-Agent Systems}, keywords = {affect simulation,affective computing,affective gaming,architecture,aware emotions,bdi-based architecture,embodied agent,emotion dynamics,emotion expression,emotion modeling,pad emotion space,primary and secondary emotions,reality,virtual,virtual human}, month = may, number = {1}, pages = {32--49}, title = {{Affective computing with primary and secondary emotions in a virtual human}}, url = {http://www.becker-asano.de/AffectiveComputingWithPrimaryAndSecondaryEmotionsInAVirtualHuman.pdf}, volume = {20}, year = {2009} } @article{Chartrand1999, abstract = {The chameleon effect refers to nonconscious mimicry of the postures, mannerisms, facial expressions, and other behaviors of one's interaction partners, such that one's behavior passively and unintentionally changes to match that of others in one's current social environment. The authors suggest that the mechanism involved is the perception-behavior link, the recently documented finding (e.g., J. A. Bargh, M. Chen, \& L. 
Burrows, 1996) that the mere perception of another's behavior automatically increases the likelihood of engaging in that behavior oneself. Experiment 1 showed that the motor behavior of participants unintentionally matched that of strangers with whom they worked on a task. Experiment 2 had confederates mimic the posture and movements of participants and showed that mimicry facilitates the smoothness of interactions and increases liking between interaction partners. Experiment 3 showed that dispositionally empathic individuals exhibit the chameleon effect to a greater extent than do other people.}, author = {Chartrand, T. L. and Bargh, J. A.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chartrand, Bargh - 1999 - The chameleon effect the perception-behavior link and social interaction.pdf:pdf}, issn = {0022-3514}, journal = {Journal of Personality and Social Psychology}, keywords = {Analysis of Variance,Empathy,Facial Expression,Female,Group Processes,Humans,Imitative Behavior,Interpersonal Relations,Male,Models,Multivariate Analysis,New York City,Posture,Psychological,Social Behavior,Social Perception}, month = jun, number = {6}, pages = {893--910}, pmid = {10402679}, title = {{The chameleon effect: the perception-behavior link and social interaction.}}, volume = {76}, year = {1999} } @inproceedings{Kipp2006, abstract = {Providing virtual characters with natural gestures is a complex task. Even if the range of gestures is limited, deciding when to play which gesture may be considered both an engineering or an artistic task. We want to strike a balance by presenting a system where gesture selection and timing can be human authored in a script, leaving full artistic freedom to the author. However, to make authoring faster we offer a rule system that generates gestures on the basis of human authored rules. 
To push automation further, we show how machine learning can be uti- lized to suggest further rules on the basis of previously annotated scripts. Our system thus offers different degrees of automation for the author, allowing for creativity and automation to join forces.}, address = {Marina Del Rey, CA, USA}, author = {Kipp, Michael}, booktitle = {6th International Conference on Intelligent Virtual Agents (IVA'06)}, doi = {10.1007/11821830\_19}, editor = {Gratch, Jonathan}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kipp - 2006 - Creativity Meets Automation Combining Nonverbal Action Authoring with Rules and Machine Learning.pdf:pdf}, pages = {230--242}, publisher = {Springer Berlin Heidelberg}, title = {{Creativity Meets Automation : Combining Nonverbal Action Authoring with Rules and Machine Learning}}, year = {2006} } @article{Spurgeon2010, abstract = {There has been a recent acceleration in the development and testing of programs for computer-assisted cognitive-behavioral therapy (CCBT). Programs are now available for treatment of depression, anxiety disorders, and other psychiatric conditions. Technology for delivery of CCBT includes multimedia programs, virtual reality, and handheld devices. Research on CCBT generally has supported the efficacy of computer-assisted therapy and has shown patient acceptance of computer tools for psychotherapy. Completion rates and treatment efficacy typically have been higher when clinicians prescribe and support the use of psychotherapeutic computer programs than when programs are delivered in a self-help format without clinician involvement. 
CCBT seems to have the potential to improve access to evidence-based therapies while reducing the demand for clinician time.}, author = {Spurgeon, Joyce a and Wright, Jesse H}, doi = {10.1007/s11920-010-0152-4}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Spurgeon, Wright - 2010 - Computer-assisted cognitive-behavioral therapy.pdf:pdf}, isbn = {1192001001}, issn = {1535-1645}, journal = {Current psychiatry reports}, keywords = {Anxiety Disorders,Anxiety Disorders: therapy,Cognitive Therapy,Depressive Disorder,Depressive Disorder: therapy,Humans,Therapy, Computer-Assisted,Treatment Outcome}, month = dec, number = {6}, pages = {547--52}, pmid = {20872100}, title = {{Computer-assisted cognitive-behavioral therapy.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/20872100}, volume = {12}, year = {2010} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Bickmore2010a, abstract = {Depression affects approximately 15\% of the US population, and is recognized as an important risk factor for poor outcomes among patients with various illnesses. Automated health education and behavior change programs have the potential to help address many of the shortcomings in health care. 
However, the role of these systems in the care of patients with depression has been insufficiently examined. In the current study, we sought to evaluate how hospitalized medical patients would respond to a computer animated conversational agent that has been developed to provide information in an empathic fashion about a patient's hospital discharge plan. In particular, we sought to examine how patients who have a high level of depressive symptoms respond to this system. Therapeutic alliance-the trust and belief that a patient and provider have in working together to achieve a desired therapeutic outcome- was used as the primary outcome measure, since it has been shown to be important in predicting outcomes across a wide range of health problems, including depression. In an evaluation of 139 hospital patients who interacted with the agent at the time of discharge, all patients, regardless of depressive symptoms, rated the agent very high on measures of satisfaction and ease of use, and most preferred receiving their discharge information from the agent compared to their doctors or nurses in the hospital. In addition, we found that patients with symptoms indicative of major depression rated the agent significantly higher on therapeutic alliance compared to patients who did not have major depressive symptoms. 
We conclude that empathic agents represent a promising technology for patient assessment, education and counseling for those most in need of comfort and caring in the inpatient setting.}, author = {Bickmore, Timothy Wallace and Mitchell, Suzanne E and Jack, Brian W and Paasche-Orlow, Michael K and Pfeifer, Laura M and Odonnell, Julie}, doi = {10.1016/j.intcom.2009.12.001}, issn = {09535438}, journal = {Interacting with Computers}, number = {4}, pages = {289--298}, pmid = {20628581}, publisher = {Elsevier B.V.}, title = {{Response to a Relational Agent by Hospital Patients with Depressive Symptoms.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2901553\&tool=pmcentrez\&rendertype=abstract}, volume = {22}, year = {2010} } @inproceedings{Morency2008, abstract = {During face-to-face interactions, listeners use backchannel feedback such as head nods as a signal to the speaker that the communication is working and that they should continue speaking. Predicting these backchannel opportunities is an important milestone for building engaging and natural virtual humans. In this paper we show how sequential probabilistic models (e.g., Hidden Markov Model or Conditional Random Fields) can automatically learn from a database of human-to-human interactions to predict listener backchannels using the speaker multimodal output features (e.g., prosody, spoken words and eye gaze). The main challenges addressed in this paper are automatic selection of the relevant features and optimal feature representation for probabilistic models. 
For prediction of visual backchannel cues (i.e., head nods), our prediction model shows a statistically significant improvement over a previously published approach based on hand-crafted rules.}, address = {Tokyo, Japan}, author = {Morency, Louis-Philippe and de Kok, Iwan and Gratch, Jonathan}, booktitle = {8th International Conference on Intelligent Virtual Agents (IVA'08)}, doi = {10.1007/978-3-540-85483-8\_18}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Morency, Kok, Gratch - 2008 - Predicting listener backchannels A probabilistic multimodal approach.pdf:pdf}, pages = {176--190}, publisher = {Springer Berlin Heidelberg}, title = {{Predicting listener backchannels: A probabilistic multimodal approach}}, url = {http://www.springerlink.com/index/180267KR7P8PT321.pdf}, year = {2008} } @inproceedings{Li2012a, author = {Li, Linghua and Liu, Yongkui and Zhang, Hengbo}, booktitle = {International Conference on Computer Science and Electronics Engineering}, doi = {10.1109/ICCSEE.2012.129}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li, Liu, Zhang - 2012 - A Survey of Computer Facial Animation Techniques.pdf:pdf}, isbn = {978-0-7695-4647-6}, keywords = {animation data,facial animation,facial modeling}, month = mar, number = {60675008}, pages = {434--438}, publisher = {IEEE}, title = {{A Survey of Computer Facial Animation Techniques}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6188272}, year = {2012} } @misc{Kalliopuska1992, abstract = {4,268 students (aged 14-20 yrs) from a comprehensive school, from gymnasium, and from vocational school were administered measures of empathy, sensitivity, self-esteem, assertiveness, and narcissism. The most empathetic students were compared with the least empathetic ones; the former were more assertive, less narcissistic, less self-focused, and more sensitive. 
They had more positive attitudes toward health behavior: they smoked less and used less alcohol than the least empathetic ones. A holistic empathetic trend was seen in thoughts and in behavior, which works against narcissism. Results suggest that humans need education on empathy in schools to promote mental growth and health behavior. (PsycINFO Database Record (c) 2012 APA, all rights reserved)}, author = {Kalliopuska, Mirja}, booktitle = {Psychological Reports}, keywords = {alcohol,assertiveness,attitudes,behaviour,education,factors,health,measures,narcissism,personality,schools,self esteem,selfesteem,sensitivity,students,work}, number = {3, Pt 2}, pages = {1119--1122}, title = {{Attitudes towards health, health behaviour, and personality factors among school students very high on empathy}}, volume = {70}, year = {1992} } @article{Kurlander1996, abstract = {Comics have a rich visual vocabulary, and people find them appealing. They are also an effective form of communication. We have built a system, called Comic Chat, that represents on-line communications in the form of comics. Comic Chat automates numerous aspects of comics generation, including balloon construction and layout, the placement and orientation of comic characters, the default selection of character gestures and expressions, the incorporation of semantic panel elements, and the choice of zoom factor for the virtual camera. This paper describes the mechanisms that Comic Chat uses to perform this automation, as well as novel aspects of the program's user interface. Comic Chat is a working program, allowing groups of people to communicate over the Internet. 
It has several advantages over other graphical chat programs, including the availability of a graphical history, and a dynamic graphical presentation.}, author = {Kurlander, David and Skelly, Tim and Salesin, David}, doi = {10.1145/237170.237260}, editor = {Rushmeier, Holly}, isbn = {0897917464}, issn = {00978930}, journal = {Proceedings of the 23rd annual conference on Computer graphics and interactive techniques SIGGRAPH 96}, keywords = {automated presentation,chat programs,comics,graphical histories,illustra,internet,non photorealistic rendering,tion,user interfaces,virtual worlds,world wide web}, number = {Annual Conference Series}, pages = {225--236}, publisher = {ACM Press}, series = {\{C\}omputer \{G\}raphics \{P\}roceedings, \{A\}nnual \{C\}onference \{S\}eries}, title = {{Comic Chat}}, url = {http://portal.acm.org/citation.cfm?doid=237170.237260}, volume = {96}, year = {1996} } @inproceedings{Lance2007, author = {Lance, Brent and Marsella, Stacy C and Rey, Marina Del}, booktitle = {Intelligent Virtual Agents (IVA'07)}, editor = {et Al., C. 
Pelachaud}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lance, Marsella, Rey - 2007 - Emotionally Expressive Head and Body Movement During Gaze Shifts.pdf:pdf}, pages = {72--85}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Emotionally Expressive Head and Body Movement During Gaze Shifts}}, volume = {4722}, year = {2007} } @article{Webster1997, author = {Webster, Jane and Ho, Hayes}, doi = {10.1145/264701.264706}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Webster, Ho - 1997 - Audience Engagement in Multimedia Presentations.pdf:pdf}, issn = {00950033}, journal = {ACM SIGMIS Database}, keywords = {computer-based presentations,engagement,playfulness}, month = apr, number = {2}, pages = {63--77}, title = {{Audience Engagement in Multimedia Presentations}}, url = {http://portal.acm.org/citation.cfm?doid=264701.264706}, volume = {28}, year = {1997} } @book{Fogg2003, abstract = {Can computers change what you think and do? Can they motivate you to stop smoking, persuade you to buy insurance, or convince you to join the Army? "Yes, they can," says Dr. B.J. Fogg, director of the Persuasive Technology Lab at Stanford University. Fogg has coined the phrase "Captology"(an acronym for computers as persuasive technologies) to capture the domain of research, design, and applications of persuasive computers.In this thought-provoking book, based on nine years of research in captology, Dr. Fogg reveals how Web sites, software applications, and mobile devices can be used to change people's attitudes and behavior. Technology designers, marketers, researchers, consumersanyone who wants to leverage or simply understand the persuasive power of interactive technologywill appreciate the compelling insights and illuminating examples found inside. Persuasive technology can be controversialand it should be. Who will wield this power of digital influence? And to what end? 
Now is the time to survey the issues and explore the principles of persuasive technology, and B.J. Fogg has written this book to be your guide. Filled with key term definitions in persuasive computingProvides frameworks for understanding this domainDescribes real examples of persuasive technologies}, author = {Fogg, B J}, booktitle = {Persuasive Technology Using Computers to Change What We Think and Do}, doi = {10.4017/gt.2006.05.01.009.00}, editor = {Kort, Yvonne and IJsselsteijn, Wijnand and Midden, Cees and Eggen, Berry and Fogg, B J}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fogg - 2003 - Persuasive Technology Using Computers to Change What We Think and Do.pdf:pdf}, isbn = {1558606432}, issn = {15691101}, number = {1}, pages = {283}, publisher = {Morgan Kaufmann}, series = {The Morgan Kaufmann series in interactive technologies}, title = {{Persuasive Technology: Using Computers to Change What We Think and Do}}, url = {http://books.google.com/books?id=r9JIkNjjTfEC\&pgis=1}, volume = {5}, year = {2003} } @article{Prevost1994, abstract = {This paper presents a theory and a computational implementation for generating prosodically appropriate synthetic speech in response to database queries. Proper distinctions of contrast and emphasis are expressed in an intonation contour that is synthesized by rule under the control of a grammar, a discourse model, and a knowledge base. The theory is based on Combinatory Categorial Grammar, a formalism which easily integrates the notions of syntactic constituency, semantics, prosodic phrasing and information structure. 
Results from our current implementation demonstrate the system's ability to generate a variety of intonational possibilities for a given sentence depending on the discourse context.}, author = {Prevost, Scott and Steedman, Mark}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Prevost, Steedman - 1994 - Specifying Intonation from Context for Speech Synthesis.pdf:pdf}, journal = {Speech Communication}, number = {1-2}, pages = {18}, publisher = {Citeseer}, title = {{Specifying Intonation from Context for Speech Synthesis}}, url = {http://arxiv.org/abs/cmp-lg/9407015}, volume = {15}, year = {1994} } @article{Grahe1999, author = {Grahe, JE}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Grahe - 1999 - The importance of nonverbal cues in judging rapport.pdf:pdf}, journal = {Journal of Nonverbal behavior}, number = {4}, pages = {253--269}, title = {{The importance of nonverbal cues in judging rapport}}, url = {http://www.springerlink.com/index/V8U30855W38M4673.pdf}, volume = {23}, year = {1999} } @incollection{Catrambone2004, author = {Catrambone, Richard and Stasko, John and Xiao, Jun}, booktitle = {From Brows to Trust: Evaluating Embodied Conversational Agents}, chapter = {9}, editor = {Ruttkay, Zs\'{o}fia and Pelachaud, Catherine}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Catrambone, Stasko, Xiao - 2004 - ECA as User Interface Paradigm Experimental Findings within a Framework for Research.pdf:pdf}, isbn = {1-4020-2730-3}, keywords = {embodied conversational agent,evaluation,research framework,task}, pages = {239--267}, publisher = {Kluwer Academic Publishers}, title = {{ECA as User Interface Paradigm: Experimental Findings within a Framework for Research}}, year = {2004} } @article{Berry1997, author = {Berry, D S and Pennebaker, James W and Mueller, J S and Hiller, W S}, doi = {10.1177/0146167297235008}, issn = 
{01461672}, journal = {Personality and Social Psychology Bulletin}, number = {5}, pages = {526--537}, title = {{Linguistic Bases of Social Perception}}, url = {http://psp.sagepub.com/cgi/doi/10.1177/0146167297235008}, volume = {23}, year = {1997} } @article{Russell2003a, abstract = {At the heart of emotion, mood, and any other emotionally charged event are states experienced as simply feeling good or bad, energized or enervated. These states-called core affect-influence reflexes, perception, cognition, and behavior and are influenced by many causes internal and external, but people have no direct access to these causal connections. Core affect can therefore be experienced as free-floating (mood) or can be attributed to some cause (and thereby begin an emotional episode). These basic processes spawn a broad framework that includes perception of the core-affect-altering properties of stimuli, motives, empathy, emotional meta-experience, and affect versus emotion regulation; it accounts for prototypical emotional episodes, such as fear and anger, as core affect attributed to something plus various nonemotional processes.}, author = {Russell, James A}, institution = {Department of Psychology, Boston College, Chestnut Hill, Massachusetts 02467, USA. james.russell@bc.edu}, journal = {Psychological Review}, number = {1}, pages = {145--172}, pmid = {12529060}, publisher = {[Washington, etc.] American Psychological Association [etc.]}, title = {{Core affect and the psychological construction of emotion.}}, url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0033-295X.110.1.145}, volume = {110}, year = {2003} } @article{Cassell2001a, abstract = {Prior research into embodied interface agents has found that users like them and find them engaging. However, results on the effectiveness of these interfaces for task completion have been mixed. 
In this paper, we argue that embodiment can serve an even stronger function if system designers use actual human conversational protocols in the design of the interface. Communicative behaviors such as salutations and farewells, conversational turn-taking with interruptions, and describing objects using hand gestures are examples of protocols that all native speakers of a language already know how to perform and can thus be leveraged in an intelligent interface. We discuss how these protocols are integrated into Rea, an embodied, multi-modal interface agent who acts as a real-estate salesperson, and we show why embodiment is required for their successful implementation.}, author = {Cassell, Justine}, doi = {10.1016/S0950-7051(00)00102-7}, issn = {09507051}, journal = {Knowledge-Based Systems}, keywords = {communicative behavior,embodied conversational agent,embodied interface agent,rea}, number = {1-2}, pages = {55--64}, publisher = {Elsevier}, title = {{More than just a pretty face: conversational protocols and the affordances of embodiment}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0950705100001027}, volume = {14}, year = {2001} } @misc{Putten2009, abstract = {This study investigates whether humans perceive a higher degree of social presence when interacting with an animated character that displays natural as opposed to no listening behaviors and whether this interacts with people’s believe that they are interacting with an agent or an avatar. In a 2x2 between subjects experimental design 83 participants were either made believe that they encounter an agent, or that they communicate with another participant mediated by an avatar. In fact, in both conditions the communication partner was an autonomous agent that either exhibited high or low behavioral realism. We found that participants experienced equal amounts of presence, regardless of Behavioral interacting realism, however, had with an agent or an avatar. 
an impact on the subjective feeling of presence: people confronted with a character displaying high behavioral realism reported a higher degree of mutual awareness.}, author = {P\"{u}tten, Astrid M Von Der and Kr\"{a}mer, Nicole C and Gratch, Jonathan}, booktitle = {Design}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/P\"{u}tten, Kr\"{a}mer, Gratch - 2009 - Who's there Can a Virtual Agent Really Elicit Social Presence.pdf:pdf}, keywords = {avatars,behavioral realism,experimental study,virtual agents}, pages = {1--7}, title = {{Who's there? Can a Virtual Agent Really Elicit Social Presence?}}, year = {2009} } @incollection{Burke2002, address = {New-York,NY}, author = {Burke, B. L. and Arkowitz, H. and Dunn, C.}, booktitle = {Motivational Interviewing: Preparing People for Change}, edition = {2nd}, pages = {217--250}, publisher = {Guilford Press}, title = {{The Efficacy of Motivational Interviewing and Its Adaptation}}, year = {2002} } @article{Mehrabian1972, author = {Mehrabian, Albert and Epstein, N}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mehrabian, Epstein - 1972 - A measure of emotional empathy.pdf:pdf}, journal = {Journal of Personality}, number = {4}, pages = {525--543}, pmid = {4642390}, publisher = {Wiley Online Library}, title = {{A measure of emotional empathy.}}, volume = {40}, year = {1972} } @article{Slater2009, author = {Slater, Mel and Khanna, Pankaj and Mortensen, Jesper and Yu, Insu}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Slater et al. 
- 2009 - Visual realism enhances realistic response in an immersive virtual environment.pdf:pdf}, issn = {0272-1716}, journal = {IEEE Computer Graphics and Applications}, keywords = {Algorithms,Computer Graphics,Computer Simulation,Decision Support Techniques,Ecosystem,Information Storage and Retrieval,Information Storage and Retrieval: methods,Models,Theoretical,User-Computer Interface}, number = {3}, pages = {76--84}, pmid = {19642617}, title = {{Visual realism enhances realistic response in an immersive virtual environment.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19642617}, volume = {29}, year = {2009} } @article{Sagi1976, author = {Sagi, A. and Hoffman, M. L.}, journal = {Developmental Psychology}, pages = {175--176}, title = {{Empathic distress in newborns}}, volume = {12}, year = {1976} } @article{Shapiro2011, author = {Shapiro, Ari}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Shapiro - 2011 - Building a character animation system.pdf:pdf}, journal = {Motion in Games (MIG'11)}, keywords = {animation,character,graphics,system}, number = {7060}, pages = {98--109}, title = {{Building a character animation system}}, url = {http://www.springerlink.com/index/L24P125448583571.pdf}, volume = {LNCS}, year = {2011} } @inproceedings{Nguyen2009c, author = {Nguyen, H.
and Masthoff, Judith}, booktitle = {Proceedings of the 4th International Conference on Persuasive Technology}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nguyen, Masthoff - 2009 - Designing empathic computers the effect of multimodal empathic feedback using animated agent(2).pdf:pdf}, isbn = {9781605583761}, keywords = {affective computing,design,experimentation,human factors}, pages = {7}, publisher = {ACM}, title = {{Designing empathic computers: the effect of multimodal empathic feedback using animated agent}}, url = {http://dl.acm.org/citation.cfm?id=1541958}, year = {2009} } @article{Dehn2000, abstract = {Over the last years, the animation of interface agents has been the target of increasing interest. Largely, this increase in attention is fuelled by speculated effects on human motivation and cognition. However, empirical investigations on the effect of animated agents are still small in number and differ with regard to the measured effects. Our aim is two-fold. First, we provide a comprehensive and systematic overview of the empirical studies conducted so far in order to investigate effects of animated agents on the user's experience, behaviour and performance. 
Second, by discussing both implications and limitations of the existing studies, we identify some general requirements and suggestions for future studies.}, author = {Dehn, Doris M and {Van Mulken}, Susanne}, doi = {10.1006/ijhc.1999.0325}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dehn, Van Mulken - 2000 - The impact of animated interface agents a review of empirical research.pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, number = {1}, pages = {1--22}, publisher = {Academic Press, Inc.}, title = {{The impact of animated interface agents: a review of empirical research}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1071581999903257}, volume = {52}, year = {2000} } @inproceedings{Ullrich2008, address = {Yokohama, Japan}, author = {Ullrich, Sebastian and Prendinger, Helmut and Ishizuka, Mitsuru}, booktitle = {2008 International Conference on Advances in Computer Entertainment Technology (ACE '08)}, doi = {10.1145/1501750.1501781}, pages = {134--137}, publisher = {ACM}, title = {{MPML3D: agent authoring language for virtual worlds}}, year = {2008} } @article{Webster2006, author = {Webster, Jane and Ahuja, Jaspreet S}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Webster, Ahuja - 2006 - Enhancing the Design of Web Navigation Systems on The Influence of User Disorientation and Engagement and Perfo.pdf:pdf}, journal = {Management Information Systems Research Center Quarterly}, keywords = {commerce,commercial,design,electronic,electronic shopping,internet,web navigation,web-based learning,wide web}, number = {3}, pages = {661--678}, title = {{Enhancing the Design of Web Navigation Systems : on The Influence of User Disorientation and Engagement and Performance}}, volume = {30}, year = {2006} } @article{Ekman1986, author = {Ekman, Paul and Friesen, W. 
V}, journal = {Motivation and Emotion}, number = {2}, pages = {159--168}, title = {{A new pancultural facial expression of emotion}}, volume = {10}, year = {1986} } @article{Pentland2000, author = {Pentland, A.}, journal = {Communications of the ACM}, number = {3}, pages = {35--44}, title = {{Looking at people}}, volume = {43}, year = {2000} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{D'Mello2009, abstract = {We explored the reliability of detecting learners' affect by monitoring their gross body language (body position and arousal) during interactions with an intelligent tutoring system called AutoTutor. Training and validation data on affective states were collected in a learning session with AutoTutor, after which the learners' affective states (i.e., emotions) were rated by the learner, a peer, and two trained judges. An automated body pressure measurement system was used to capture the pressure exerted by the learner on the seat and back of a chair during the tutoring session. We extracted two sets of features from the pressure maps. The first set focused on the average pressure exerted, along with the magnitude and direction of changes in the pressure during emotional experiences. 
The second set of features monitored the spatial and temporal properties of naturally occurring pockets of pressure. We constructed five data sets that temporally integrated the affective judgments with the two sets of pressure features. The first four datasets corresponded to judgments of the learner, a peer, and two trained judges, whereas the final data set integrated judgments of the two trained judges. Machine-learning experiments yielded affect detection accuracies of 73\%, 72\%, 70\%, 83\%, and 74\%, respectively (chance=50\%) in detecting boredom, confusion, delight, flow, and frustration, from neutral. Accuracies involving discriminations between two, three, four, and five affective states (excluding neutral) were 71\%, 55\%, 46\%, and 40\% with chance rates being 50\%, 33\%, 25\%, and 20\%, respectively.}, author = {D'Mello, Sidney and Graesser, Arthur C}, doi = {10.1080/08839510802631745}, issn = {08839514}, journal = {Applied Artificial Intelligence}, number = {2}, pages = {123--150}, publisher = {Taylor \& Francis, Inc.}, title = {{Automatic Detection of Learner's Affect From Gross Body Language}}, url = {http://www.tandfonline.com/doi/abs/10.1080/08839510802631745}, volume = {23}, year = {2009} } @inproceedings{Vargas2010, abstract = {Multimodal conversational dialogue sys- tems consisting of numerous software components create challenges for the un- derlying software architecture and devel- opment practices. Typically, such sys- tems are built on separate, often pre- existing components developed by dif- ferent organizations and integrated in a highly iterative way. The traditional dia- logue system pipeline is not flexible enough to address the needs of highly in- teractive systems, which include parallel processing of multimodal input and out- put. 
We present an architectural solution for a multimodal conversational social dialogue system.}, address = {The University of Tokyo}, author = {Vargas, C Emilio and Field, Debora}, booktitle = {Proceedings of SIGDIAL 2010: the 11th Annual Meeting of the Special Interest Group on Discourse and Dialogue}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vargas, Field - 2010 - ‘ How was your day ’ An architecture for multimodal ECA systems.pdf:pdf}, pages = {47--50}, publisher = {Association for Computational Linguistics}, title = {{‘ How was your day ?’ An architecture for multimodal ECA systems}}, year = {2010} } @article{Anderson1962, author = {Anderson, Robert P. and Anderson, Gordon V.}, doi = {10.1002/j.2164-4918.1962.tb02226.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Anderson, Anderson - 1962 - Development of an Instrument for Measuring Rapport.pdf:pdf}, issn = {00315737}, journal = {The Personnel and Guidance Journal}, month = sep, number = {1}, pages = {18--24}, title = {{Development of an Instrument for Measuring Rapport}}, url = {http://doi.wiley.com/10.1002/j.2164-4918.1962.tb02226.x}, volume = {41}, year = {1962} } @inproceedings{Magerko2011, abstract = {This article presents our work on building a virtual coach agent, called Dr. Vicky, and training environment (called the Virtual BNI Trainer, or VBT) for learning how to correctly talk with medical patients who have substance abuse issues. This work focuses on how to effectively design menu-based dialogue interactions for conversing with a virtual patient within the context of learning how to properly engage in such conversations according to the brief negotiated interview techniques we desire to train. Dr. Vicky also employs a model of student knowledge to influence the mediation strategies used in personalizing the training experience and guidance offered. 
The VBT is a prototype training application that will be used by medical students and practitioners within the Yale medical community in the future.}, author = {Magerko, Brian and Dean, James and Idnani, Avinash and Pantalon, Michael and Onofrio, Gail D}, booktitle = {Association for the Advancement of Artificial Intelligence (AAAI) Spring Symposium}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Magerko et al. - 2011 - Dr. Vicky A Virtual Coach for Learning Brief Negotiated Interview Techniques for Treating Emergency Room Patien.pdf:pdf}, keywords = {AAAI Technical Report SS-11-01}, pages = {25--32}, publisher = {Association for the Advancement of Artificial Intelligence (www.aaai.org)}, title = {{Dr. Vicky: A Virtual Coach for Learning Brief Negotiated Interview Techniques for Treating Emergency Room Patients}}, year = {2011} } @article{Diseker1981, author = {Diseker, R. A. and Michielutte, R.}, journal = {Journal of Medical Education}, pages = {1004--1010}, title = {{An analysis of empathy in medical students before and following clinical experience}}, volume = {56}, year = {1981} } @article{Paiva2000, author = {Paiva, Ana}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Paiva - 2000 - Affective interactions toward a new generation of computer interfaces.pdf:pdf}, journal = {Affective interactions}, pages = {1--8}, title = {{Affective interactions: toward a new generation of computer interfaces?}}, url = {http://www.springerlink.com/index/826w110p65762167.pdf}, year = {2000} } @article{Davis1994a, abstract = {This investigation used data from Loehlin and Nichols's (1976) study of over 800 sets of twins to examine evidence for the heritability of three facets of empathy: empathic concern, personal distress, and perspective taking.
Expert judges first identified sets of adjectives, included within Loehlin and Nichols's original data, which reflected each empathy construct; these items were then validated in an independent sample. Comparisons of the responses given to these items by identical and fraternal twins in the Loehlin and Nichols investigation revealed evidence of significant heritability for characteristics associated with the two affective facets of empathy-empathic concern and personal distress-but not for the nonaffective construct of perspective taking. This pattern is consistent with the view that temperamental emotionality may underlie the heritability of affective empathy.}, author = {Davis, Mark H. and Luce, C and Kraus, S J}, institution = {Department of Behavioral Science, Eckerd College, St. Petersburg, FL 33733.}, journal = {Journal of Personality}, keywords = {adolescent,affect,dizygotic,dizygotic psychology,empathy,female,humans,interpersonal relations,male,monozygotic,monozygotic psychology,personality,personality genetics,personality tests,psychological,reproducibility results,research design,stress,temperament,twins}, number = {3}, pages = {369--391}, pmid = {7965564}, title = {{The heritability of characteristics associated with dispositional empathy.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/7965564}, volume = {62}, year = {1994} } @article{Gunes2010, author = {Gunes, Hatice and Pantic, Maja}, doi = {10.4018/jse.2010101605}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gunes, Pantic - 2010 - Automatic, Dimensional and Continuous Emotion Recognition.pdf:pdf}, issn = {1947-9093}, journal = {International Journal of Synthetic Emotions}, keywords = {bodily expression,continuous emotion recognition,dimensional emotion modelling,emotional acoustic and bio-signals,facial expression,multimodal fusion}, month = jan, number = {1}, pages = {68--99}, title = {{Automatic, Dimensional and Continuous Emotion Recognition}}, 
volume = {1}, year = {2010} } @inproceedings{Takashima2008, abstract = {Blinking is one of the most important cues for forming person impressions. We focus on the eye blinking rate of avatars and investigate its effect on viewer subjective impressions. Two experiments are conducted. The stimulus avatars included humans with generic reality (male and female), cartoon-style humans (male and female), animals, and unidentified life forms that were presented as a 20-second animation with various blink rates: 9, 12, 18, 24 and 36 blinks/min. Subjects rated their impressions of the presented stimulus avatars on a seven-point semantic differential scale. The results showed a significant effect of the avatars blinking on viewer impressions and it was larger with the human- style avatars than the others. The results also lead to several implications and guidelines for the design of avatar representation. Blink animation of 18 blinks/min with a human-style avatar produces the friendliest impression. The higher blink rates, i.e., 36 blinks/min, give inactive impressions while the lower blink rates, i.e., 9 blinks/min, give intelligent impressions. 
Through these results, guidelines are derived for managing attractiveness of avatar by changing the avatars blinking rate.}, author = {Takashima, Kazuki and Omori, Yasuko and Yoshimoto, Yoshiharu and Itoh, Yuich and Kitamura, Yoshifumi and Kishino, Fumio}, booktitle = {Proceedings of Graphics Interface 2008}, pages = {169--176}, publisher = {Canadian Information Processing Society}, title = {{Effects of avatar's blinking animation on person impressions}}, url = {http://portal.acm.org/citation.cfm?id=1375744}, year = {2008} } @book{Stueber2006, author = {Stueber, Karsten}, edition = {1}, isbn = {026219550X}, publisher = {MIT Press}, title = {{Rediscovering Empathy: Agency, Folk Psychology, and the Human Sciences}}, year = {2006} } @book{Kipp2005, author = {Kipp, Michael}, isbn = {1581122551, 9781581122558}, publisher = {Universal-Publishers}, title = {{Gesture Generation By Imitation: From Human Behavior To Computer Character Animation}}, year = {2005} } @article{Jacques1995, author = {Jacques, R. and Preece, J. and Carey, T.}, journal = {Canadian Journal of Educational Communication}, number = {1}, pages = {49--59}, title = {{Engagement as a design concept for multimedia}}, volume = {24}, year = {1995} } @article{Lisetti2013, author = {Lisetti, Christine and Amini, Reza and Yasavur, Ugan and Rishe, Naphtali}, doi = {10.1145/2544103}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lisetti et al. - 2013 - I Can Help You Change ! An Empathic Virtual Agent Delivers Behavior.pdf:pdf}, journal = {ACM Transactions on Management Information Systems}, number = {4}, pages = {1--28}, title = {{I Can Help You Change! An Empathic Virtual Agent Delivers Behavior Change Health Interventions}}, volume = {4}, year = {2013} } @article{Kobayashi2012, abstract = {Research on the social and psychological effects of mobile phone communication primarily is conducted using self-report survey measures.
However, recent studies have suggested such measures of mobile phone communication use contain a significant amount of measurement error. This study compares the frequency of mobile phone use measured by self-report questions with error-free log data automatically collected through an Android smartphone application. We investigate the extent to which measurement non-random error exists in the self-report questions and the predictors of this error. The data were collected from a sample of 310 Android phone users residing in Japan. Our analysis shows that users generally over- report their frequency of mobile communication and that over-estimation is better predicted by proxy measures of social activity than demographic variables. We further show an example of how over-reporting can result in an overestimation of the effects of mediated communication on civic engagement. Finally, the value of behavioral log data in mediated communication research is discussed.}, author = {Kobayashi, T and Boase, J}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kobayashi, Boase - 2012 - No such effect The implications of measurement error in self-report measures of mobile communication use.pdf:pdf}, journal = {Communication Methods and Measures}, keywords = {and dependent,behavioural,case that both independent,have relied heavily on,indeed,it is often the,log,measurement error,mobile communication,quantitative mediated communication researchers,self-report,self-report measures,smartphone,spurious correlation,when gathering data}, number = {2}, pages = {1--18}, title = {{No such effect? 
The implications of measurement error in self-report measures of mobile communication use}}, url = {http://www.tandfonline.com/doi/abs/10.1080/19312458.2012.679243}, volume = {6}, year = {2012} } @article{Pierre-Yves2003, author = {Pierre-Yves, O}, doi = {10.1016/S1071-5819(02)00141-6}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pierre-Yves - 2003 - The production and recognition of emotions in speech features and algorithms.pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, keywords = {emotion production,emotion recognition,emotions,robots,speech}, month = jul, number = {1-2}, pages = {157--183}, title = {{The production and recognition of emotions in speech: features and algorithms}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1071581902001416}, volume = {59}, year = {2003} } @inproceedings{Adam2006, abstract = {Nowadays, more and more artificial agents integrate emotional abil- ities, for different purposes: expressivity, adaptability, believability... Designers mainly use Ortony et al.’s typology of emotions, that provides a formalization of twenty-two emotions based on psychological theories. But most of them restrain their agents to a fewemotions among these twenty-two ones, and are more or less faithful to their definition. In this paper we propose to extend standard BDI (be- lief, desire, intention) logics to account formore emotions while trying to respect their definitions as exactly as possible.}, author = {Adam, Carole and Gaudou, Benoit and Herzig, Andreas and Longin, Dominique}, booktitle = {The 12th International Conference on Artificial Intelligence: Methodology, Systems, Applications}, editor = {Euzenat, J. and Domingue, J.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Adam et al. 
- 2006 - OCC's emotions a formalization in a BDI logic.pdf:pdf}, pages = {24--32}, publisher = {Springer-Verlag}, title = {{OCC's emotions: a formalization in a BDI logic}}, url = {http://www.springerlink.com/index/F45K737M44J10840.pdf}, volume = {9}, year = {2006} } @article{Ververidis2006, author = {Ververidis, Dimitrios}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ververidis - 2006 - Emotional speech recognition Resources, features, and methods.pdf:pdf}, journal = {Speech communication}, keywords = {acoustic features,emotional speech classification,emotional speech data collections,emotions,interfaces,stress}, number = {April}, title = {{Emotional speech recognition: Resources, features, and methods}}, url = {http://www.sciencedirect.com/science/article/pii/S0167639306000422}, year = {2006} } @article{Pelachaud2009, abstract = {Over the past few years we have been developing an expressive embodied conversational agent system. In particular, we have developed a model of multimodal behaviours that includes dynamism and complex facial expressions. The first feature refers to the qualitative execution of behaviours. Our model is based on perceptual studies and encompasses several parameters that modulate multimodal behaviours. The second feature, the model of complex expressions, follows a componential approach where a new expression is obtained by combining facial areas of other expressions. Lately we have been working on adding temporal dynamism to expressions. So far they have been designed statically, typically at their apex. Only full-blown expressions could be modelled. To overcome this limitation, we have defined a representation scheme that describes the temporal evolution of the expression of an emotion. 
It is no longer represented by a static definition but by a temporally ordered sequence of multimodal signals.}, author = {Pelachaud, Catherine}, doi = {10.1098/rstb.2009.0186}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pelachaud - 2009 - Modelling multimodal expression of emotion in a virtual agent.pdf:pdf}, issn = {1471-2970}, journal = {Philosophical transactions of the Royal Society of London. Series B, Biological sciences}, keywords = {Computer Simulation,Emotions,Emotions: physiology,Facial Expression,Humans,Models, Psychological,Social Behavior}, month = dec, number = {1535}, pages = {3539--48}, pmid = {19884148}, title = {{Modelling multimodal expression of emotion in a virtual agent.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2781894\&tool=pmcentrez\&rendertype=abstract}, volume = {364}, year = {2009} } @inproceedings{Broek2005, abstract = {A new view on empathic agents is introduced, named: Empathic Agent Technology (EAT). It incorporates a speech analysis, which provides an indication for the amount of tension present in people. It is founded on an indirect physiological measure for the amount of experienced stress, defined as the variability of the fundamental frequency of the human voice. A thorough review of literature is provided on which the EAT is founded. In addition, the complete processing line of this measure is introduced. Hence, the first generally applicable, completely automated technique is introduced that enables the development of truly empathic agents.}, address = {Utrecht – The Netherlands}, author = {van den Broek, E. L.}, booktitle = {Proceedings of the AAMAS-05 Agent-Based Systems for Human Learning workshop (ABSHL 2005)}, editor = {Johnson, L. and Richards, D. and Sklar, E. 
and Wilensky, U.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Broek - 2005 - Empathic agent technology.pdf:pdf}, keywords = {affect,agents,emotion,empathy,fundamental frequency,pitch,speech,stress}, pages = {59--67}, publisher = {Brooklyn College}, title = {{Empathic agent technology}}, url = {http://eprints.eemcs.utwente.nl/21142/}, year = {2005} } @inproceedings{Hamza2004, abstract = {This paper introduces the IBM Expressive Speech Synthesis system. We describe recent work in improving the quality of our baseline text-to-speech system as well as extending our capabilities to generate expressive synthetic speech. We present results showing improved base quality, especially for sentences drawn from a limited domain. We also demonstrate our ability to convey good news and bad news, produce contrastive emphasis, and ask a question appropriately. In order to facilitate access to the expressive capabilities, we use some of our proposed extensions to the Speech Synthesis Markup Language (SSML).}, address = {Jeju, Korea}, author = {Hamza, Wael and Bakis, Raimo and Eide, EM and Picheny, MA and Pitrelli, JF}, booktitle = {Proc. of the 8th International Conference on Spoken Language Processing}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hamza et al. - 2004 - The IBM expressive speech synthesis system.pdf:pdf}, title = {{The IBM expressive speech synthesis system}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.129.1962\&rep=rep1\&type=pdf}, year = {2004} } @inproceedings{Kipp2006a, abstract = {When using virtual characters in the human-computer inter- face the question arises of how useful this kind of interface is: whether the human user accepts, enjoys and profits from this form of interaction. Thorough system evaluations, however, are rarely done. 
We propose a post-questionnaire evaluation for a virtual character system that we ap- ply to COHIBIT, an interactive museum exhibit with virtual characters. The evaluation study investigates the subjects’ experiences with the ex- hibit with regard to informativeness, entertainment and virtual character perception. Our subjects rated the exhibit both entertaining and infor- mative and gave it a good overall mark. We discuss the detailed results and identify useful factors to consider when building and evaluating vir- tual character applications.}, author = {Kipp, Michael and Kipp, Kerstin H and Ndiaye, Alassane and Gebhard, Patrick}, booktitle = {Intelligent Virtual Agents (IVA'06)}, editor = {Gratch, J.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kipp et al. - 2006 - Evaluating the Tangible Interface and Virtual Characters in the Interactive COHIBIT Exhibit.pdf:pdf}, pages = {434--444}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Evaluating the Tangible Interface and Virtual Characters in the Interactive COHIBIT Exhibit}}, year = {2006} } @article{Lisetti2003, abstract = {Accountingfor a patient’s emotional state is integral in medical care. Tele-health research attests to the challenge clinicians must overcome in assessing patient emotional state when modalities are limited (J. Adv. Nurs. 36(5) 668). The extra effort involved in addressingthis challenge requires attention, skill, and time. Large caseloads may not afford tele-home health- care (tele-HHC) clinicians the time and focus necessary to accurately assess emotional states and trends. Unstructured interviews with experienced tele-HHC providers support the introduction of objective indicators of patients’ emotional status in a useful form to enhance patient care. 
We discuss our contribution to addressingthis challenge, which involves building user models not only of the physical characteristics of users—in our case patients—but also models of their emotions. We explain our research in progress on Affective Computing for tele-HHC applications, which includes: developinga system architecture for monitoringand respondingto human multimodal affect and emotions via multimedia and empathetic avatars; mapping of physiological signals to emotions and synthesizing the patient’s affective information for the health-care provider. Our results usinga wireless non-invasive wearable computer to collect physiological signals and mapping these to emotional states show the feasibility of our approach, for which we lastly discuss the future research issues that we have identified.}, author = {Lisetti, Christine L and Nasoz, F. and LeRouge, C. and Ozyer, O. and Alvarez, K.}, doi = {10.1016/S1071-5819(03)00051-X}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, keywords = {Affective computing,Emotions,Human factors of multimedia systems,Intelligent user interfaces,Tele-health,Tele-home health care,User modeling}, month = jul, number = {1-2}, pages = {245--255}, title = {{Developing multimodal intelligent affective interfaces for tele-home health care}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S107158190300051X}, volume = {59}, year = {2003} } @article{Fong2003, author = {Fong, Terrence and Nourbakhsh, Illah and Dautenhahn, Kerstin}, doi = {10.1016/S0921-8890(02)00372-X}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fong, Nourbakhsh, Dautenhahn - 2003 - A survey of socially interactive robots.pdf:pdf}, issn = {09218890}, journal = {Robotics and Autonomous Systems}, keywords = {human,interaction aware robot,robot interaction,sociable robot,social robot,socially interactive robot}, month = mar, number = {3-4}, pages = {143--166}, title = {{A survey of 
socially interactive robots}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S092188900200372X}, volume = {42}, year = {2003} } @inproceedings{Pontier2009, abstract = {There is a growing belief that the environment plays an important role in the healing process of patients, supported by empirical findings. Previous research showed that psychological stress caused by loneliness can be reduced by artificial companions. As a pilot application for this purpose, this paper presents an affective agent playing tic-tac-toe with the user. Experimenting with a number of agents under different parameter settings shows the agent is able to show human-like emotional behavior, and can make decisions based on rationality as well as on affective influences. After discussing the application with clinical experts and making improvements where needed, the application can be tested in a clinical setting in future research.}, author = {Pontier, Matthijs and Siddiqui, Ghazanfar Farooq}, booktitle = {PRIMA '09 Proceedings of the 12th International Conference on Principles of Practice in Multi-Agent Systems}, editor = {Yang, J.-J. and others}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pontier, Siddiqui - 2009 - An Affective Agent Playing Tic-Tac-Toe as Part of a.pdf:pdf}, keywords = {cognitive modeling,emotion modeling,healing environment}, pages = {33--47}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{An Affective Agent Playing Tic-Tac-Toe as Part of a Healing Environment}}, year = {2009} } @book{Smith1966, address = {New York}, author = {Smith, A.}, publisher = {Augustus M. Kelley}, title = {{The theory of moral sentiments}}, year = {1966} } @inproceedings{Lee2009a, abstract = {During face-to-face conversation, the speaker’s head is continually in motion. These movements serve a variety of important communicative functions.
Our goal is to develop a model of the speaker’s head movements that can be used to generate head movements for virtual agents based on a ges- ture annotation corpora. In this paper, we focus on the first step of the head movement generation process: predicting when the speaker should use head nods. We describe our machine-learning approach that creates a head nod model from annotated corpora of face-to-face human interaction, relying on the linguistic features of the surface text. We also describe the feature selection process, training process, and the evaluation of the learned model with test data in detail. The result shows that the model is able to predict head nods with high precision and recall.}, address = {Budapest, Hungary}, author = {Lee, Jina and Marsella, Stacy C}, booktitle = {8th Int'l Conf. on Autonomous Agents and Multiagent Systems (AAMAS 2009)}, editor = {Decker and Sichman and Sierra and Castelfranchi}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Marsella - 2009 - Learning a Model of Speaker Head Nods using Gesture Corpora.pdf:pdf}, keywords = {bal behaviors,embodied conversational agents,head nods,machine learning,nonver-,virtual agents}, number = {Aamas}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{Learning a Model of Speaker Head Nods using Gesture Corpora}}, year = {2009} } @article{O'Keefe1988, abstract = {Offers models of 3 alternative message design logics and describes a general method of message analysis based on these models. The method of analysis is exemplified in a study of messages used in addressing a regulative communication task. 92 undergraduates were asked to provide messages they would address to a subordinate who failed to complete assigned work; these messages were classified in terms of the kind of goal set being pursued and the kind of reasoning reflected in their design. 
Male and female Ss differed systematically in the message design logic they employed, and there were significant relationships between interpersonal construct differentiation and message design logic and goal structure. ((c) 1997 APA/PsycINFO, all rights reserved)}, author = {O'Keefe, Barbara J}, journal = {Communication Monographs}, number = {1}, pages = {80--103}, title = {{The logic of message design: Individual differences in reasoning about communication}}, volume = {55}, year = {1988} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Li2006, author = {Li, Lan and Chen, Ji-hua}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li, Chen - 2006 - Emotion recognition using physiological signals.pdf:pdf}, journal = {Advances in Artificial Reality and Tele-Existence - Lecture Notes in Computer Science}, pages = {437--446}, title = {{Emotion recognition using physiological signals}}, url = {http://link.springer.com/chapter/10.1007/11941354\_44}, volume = {4282}, year = {2006} } @article{Ha2010, abstract = {Effective doctor-patient communication is a central clinical function in building a therapeutic doctor-patient relationship, which is the heart and art of medicine. This is important in the delivery of high-quality health care. 
Much patient dissatisfaction and many complaints are due to breakdown in the doctor-patient relationship. However, many doctors tend to overestimate their ability in communication. Over the years, much has been published in the literature on this important topic. We review the literature on doctor-patient communication.}, author = {Ha, Jennifer Fong and Longnecker, Nancy}, editor = {Pendleton, D and Hasler, J}, journal = {The Ochsner journal}, number = {1}, pages = {38--43}, publisher = {the Academic Division of Ochsner Clinic Foundation}, title = {{Doctor-Patient Communication: A Review}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=3096184\&tool=pmcentrez\&rendertype=abstract}, volume = {10}, year = {2010} } @inproceedings{Pasquariello2001, author = {Pasquariello, Stefano and Pelachaud, Catherine}, booktitle = {Proceedings 6th Online World Conference on Soft Computing in Industrial Appications Session on Soft Computing for Intelligent 3D Agents}, doi = {10.1007/978-1-4471-0123-9\_43}, editor = {{R. Roy}}, pages = {511--525}, publisher = {Springer-Verlag London}, title = {{Greta: A Simple Facial Animation Engine}}, year = {2001} } @article{Russell2003, abstract = {A flurry of theoretical and empirical work concerning the production of and response to facial and vocal expressions has occurred in the past decade. That emotional expressions express emotions is a tautology but may not be a fact. Debates have centered on universality, the nature of emotion, and the link between emotions and expressions. 
Modern evolutionary theory is informing more models, emphasizing that expressions are directed at a receiver, that the interests of sender and receiver can conflict, that there are many determinants of sending an expression in addition to emotion, that expressions influence the receiver in a variety of ways, and that the receiver's response is more than simply decoding a message.}, author = {Russell, James A. and Bachorowski, Jo-Anne and Fernandez-Dols, Jose-Miguel}, doi = {10.1146/annurev.psych.54.101601.145102}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Russell, Bachorowski, Fernandez-Dols - 2003 - Facial and vocal expressions of emotion.pdf:pdf}, issn = {0066-4308}, journal = {Annual review of psychology}, keywords = {Emotions,Expressed Emotion,Facial Expression,Humans,Interpersonal Relations,Nonverbal Communication,Personal Construct Theory,Social Perception,Speech Acoustics}, month = jan, pages = {329--349}, pmid = {12415074}, title = {{Facial and vocal expressions of emotion.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12415074}, volume = {54}, year = {2003} } @article{Gerdes2009, abstract = {This article presents a social work model of empathy that reflects the latest interdisciplinary research findings on empathy. The model reflects the social work commitment to social justice. The three model components are: 1) the affective response to another’s emotions and actions; 2) the cognitive processing of one’s affective response and the other person’s perspective; and 3) the conscious decision-making to take empathic action. Mirrored affective responses are involuntary, while cognitive processing and conscious decision-making are voluntary. The affective component requires healthy, neural pathways to function appropriately and accurately. The cognitive aspects of perspective-taking, self-awareness, and emotion regulation can be practiced and cultivated, particularly through the use of mindfulness techniques.
Empathic action requires that we move beyond affective responses and cognitive processing toward utilizing social work values and knowledge to inform our actions. By introducing the proposed model of empathy, we hope it will serve as a catalyst for discussion and future research and development of the model.}, author = {Gerdes, Karen E. and Segal, Elizabeth A.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gerdes, Segal - 2009 - A social work model of empathy.pdf:pdf}, journal = {Advances in Social Work}, keywords = {empathy,social cognitive neuroscience,social empathy}, number = {2}, pages = {114--127}, title = {{A social work model of empathy}}, url = {https://advancesinsocialwork.iupui.edu/index.php/advancesinsocialwork/article/viewArticle/235 http://journals.iupui.edu/index.php/advancesinsocialwork/article/view/235/215}, volume = {10}, year = {2009} } @article{Scherer2007, abstract = {In earlier work, the authors analyzed emotion portrayals by professional actors separately for facial expression, vocal expression, gestures, and body movements. In a secondary analysis of the combined data set for all these modalities, the authors now examine to what extent actors use prototypical multimodal configurations of expressive actions to portray different emotions, as predicted by basic emotion theories claiming that expressions are produced by fixed neuromotor affect programs. Although several coherent unimodal clusters are identified, the results show only 3 multimodal clusters: agitation, resignation, and joyful surprise, with only the latter being specific to a particular emotion. Finding variable expressions rather than prototypical patterns seems consistent with the notion that emotional expression is differentially driven by the results of sequential appraisal checks, as postulated by componential appraisal theories.}, author = {Scherer, Klaus R. 
and Ellgring, Heiner}, doi = {10.1037/1528-3542.7.1.158}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Scherer, Ellgring - 2007 - Multimodal expression of emotion affect programs or componential appraisal patterns.pdf:pdf}, issn = {1528-3542}, journal = {Emotion (Washington, D.C.)}, keywords = {Adult,Affect,Facial Expression,Female,Gestures,Humans,Judgment,Male,Psychomotor Performance,Speech Acoustics,Voice}, month = feb, number = {1}, pages = {158--71}, pmid = {17352571}, title = {{Multimodal expression of emotion: affect programs or componential appraisal patterns?}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17352571}, volume = {7}, year = {2007} } @techreport{Pereira2006, abstract = {In this report we present the Emotional-BDI architecture, an extension to the BDI architecture supporting Artificial Emotions and including internal representations for agent’s Capabilities and Resources. The architecture we present here, is conceptual, defining which components should exist so that Emotional- BDI agents can use Effective Capabilities as well as Effective Resources in order to better cope with highly dynamic environments.}, address = {Porto, Portugal}, author = {Pereira, David and Oliveira, Eugenio and Moreira, Nelma}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pereira, Oliveira, Moreira - 2006 - Towards an Architecture for Emotional BDI Agents.pdf:pdf}, institution = {Universidade do Porto}, keywords = {Artificial Emotions,BDI Agents}, title = {{Towards an Architecture for Emotional BDI Agents}}, year = {2006} } @article{Cowell2005, abstract = {For years, people have sought more natural means of communicating with their computers. Many have suggested that interaction with a computer should be as easy as interacting with other people, taking advantage of the multimodal nature of human communication. 
While users should, in theory, gravitate to such anthropomorphic embodiments, quite the contrary has been experienced; users generally have been dissatisfied and abandoned their use. This suggests a disconnect between factors that make human-human communication engaging and those used by designers to support human-agent interaction. This paper discusses a set of empirical studies that attempted to replicate human-human non-verbal behavior. The focus revolved around behaviors that portray a credible fa\c{c}ade, thereby helping embodied conversational agents (ECAs) to form a successful cooperative dyad with users. Based on a review of the non-verbal literature, a framework was created that identified trustworthy and credible non-verbal behaviors across five areas and formed design guidelines for character interaction. The design suggestions for those areas emanating from the facial region were experimentally supported but there was no concordant increase in perceived trust when bodily regions (posture, gesture) were added. In addition, in examining the importance of demographic elements in embodiment, it was found that users prefer to interact with characters that match their ethnicity and are young looking. There was no significant preference for gender. The implications of these results, as well as other interesting consequences are discussed.}, author = {Cowell, Andrew J. 
and Stanney, Kay M.}, doi = {10.1016/j.ijhcs.2004.11.008}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cowell, Stanney - 2005 - Manipulation of non-verbal interaction style and demographic embodiment to increase anthropomorphic computer ch.pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies - Special issue: Subtle expressivity for characters and robots}, keywords = {Anthropomorphic interfaces,Interface agents,Non-verbal behavior}, month = feb, number = {2}, pages = {281--306}, title = {{Manipulation of non-verbal interaction style and demographic embodiment to increase anthropomorphic computer character credibility}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1071581904001260 http://ocw.tudelft.nl/fileadmin/ocw/opener/Manipulation\_of\_non-verbal\_interaction\_style\_and\_demographic\_embodiment\_to\_increase\_anthropomorphic\_computer\_character\_credibility.pdf}, volume = {62}, year = {2005} } @techreport{Miller1981, abstract = {Thirty-one self-referred problem drinkers were randomly assigned to one of two modalities for behavioral self-control training with a goal of moderation: (1) minimal therapist contact, in which clients worked only with a self-help manual; and (2) therapist directed training, in which clients received self-help materials plus 10 individual treatment sessions. Both groups showed significant reductions in alcohol consumption and peak blood alcohol concentration. Contrary to expectations, there were no significant differences on outcome measures between groups. Results are interpreted within a self-control framework.}, author = {Miller, William R. 
and Gribskov, C J and Mortell, R L}, booktitle = {The International journal of the addictions}, number = {7}, pages = {1247--1254}, pmid = {7327785}, publisher = {Informa UK Ltd UK}, title = {{Effectiveness of a self-control manual for problem drinkers with and without therapist contact.}}, url = {http://informahealthcare.com/doi/abs/10.3109/10826088109039178}, volume = {16}, year = {1981} } @inproceedings{Mahoor2011, author = {Mahoor, Mohammad H. and Zhou, Mu and Veon, Kevin L. and Mavadati, S. Mohammad and Cohn, Jeffrey F.}, booktitle = {IEEE Workshop on Applications of Computer Vision (WACV)}, doi = {10.1109/FG.2011.5771420}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mahoor et al. - 2011 - Facial action unit recognition with sparse representation.pdf:pdf}, isbn = {978-1-4244-9140-7}, month = mar, pages = {336--342}, publisher = {IEEE}, title = {{Facial action unit recognition with sparse representation}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5771420}, year = {2011} } @article{Calvo2010, abstract = {This survey describes recent progress in the field of Affective Computing (AC), with a focus on affect detection. Although many AC researchers have traditionally attempted to remain agnostic to the different emotion theories proposed by psychologists, the affective technologies being developed are rife with theoretical assumptions that impact their effectiveness. Hence, an informed and integrated examination of emotion theories from multiple areas will need to become part of computing practice if truly effective real-world systems are to be achieved. This survey discusses theoretical perspectives that view emotions as expressions, embodiments, outcomes of cognitive appraisal, social constructs, products of neural circuitry, and psychological interpretations of basic feelings. 
It provides meta-analyses on existing reviews of affect detection systems that focus on traditional affect detection modalities like physiology, face, and voice, and also reviews emerging research on more novel channels such as text, body language, and complex multimodal systems. This survey explicitly explores the multidisciplinary foundation that underlies all AC applications by describing how AC researchers have incorporated psychological theories of emotion and how these theories affect research questions, methods, results, and their interpretations. In this way, models and methods can be compared, and emerging insights from various disciplines can be more expertly integrated.}, author = {Calvo, R.A. and D'Mello, S.}, doi = {10.1109/T-AFFC.2010.1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Calvo, D'Mello - 2010 - Affect detection An interdisciplinary review of models, methods, and their applications.pdf:pdf}, journal = {IEEE Transactions on Affective Computing}, keywords = {Affective computing,affect sensing and analysis,emotion detection,emotion theory,multimodal recognition}, number = {1}, pages = {18--37}, publisher = {IEEE}, title = {{Affect detection: An interdisciplinary review of models, methods, and their applications}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5520655}, volume = {1}, year = {2010} } @book{Fridlund1994, address = {San Diego}, author = {Fridlund, A.J.}, publisher = {Academic Press}, title = {{Human Facial Expression: An Evolutionary View}}, year = {1994} } @article{Clark1998, abstract = {Speakers often repeat the first word of major constituents, as in, "I uh I wouldn't be surprised at that." Repeats like this divide into four stages: an initial commitment to the constituent (with "I"); the suspension of speech; a hiatus in speaking (filled with "uh"); and a restart of the constituent ("I wouldn't."). 
An analysis of all repeated articles and pronouns in two large corpora of spontaneous speech shows that the four stages reflect different principles. Speakers are more likely to make a premature commitment, immediately suspending their speech, as both the local constituent and the constituent containing it become more complex. They plan some of these suspensions from the start as preliminary commitments to what they are about to say. And they are more likely to restart a constituent the more their stopping has disrupted its delivery. We argue that the principles governing these stages are general and not specific to repeats.}, author = {Clark, H H and Wasow, T}, doi = {10.1006/cogp.1998.0693}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Clark, Wasow - 1998 - Repeating words in spontaneous speech.pdf:pdf}, issn = {0010-0285}, journal = {Cognitive psychology}, keywords = {Humans,Language,Verbal Behavior}, month = dec, number = {3}, pages = {201--42}, pmid = {9892548}, title = {{Repeating words in spontaneous speech.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9892548}, volume = {37}, year = {1998} } @article{Hanna2007, abstract = {This article describes how an object-oriented approach can be applied to the architectural design of a spoken language dialog system with the aim of facilitating the modification, extension, and reuse of discourse-related expertise. The architecture of the developed system is described and a functionally similar VoiceXML system is used to provide a comparative baseline across a range of modification and reuse scenarios. 
It is shown that the use of an object-oriented dialog manager can provide a capable means of reusing existing discourse expertise in a manner that limits the degree of structural decay associated with system change.}, author = {Hanna, Philip and O'neill, Ian and Wootton, Craig and Mctear, Michael}, doi = {10.1145/1255171.1255173}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hanna et al. - 2007 - Promoting extension and reuse in a spoken dialog manager.pdf:pdf}, issn = {15504875}, journal = {ACM Transactions on Speech and Language Processing}, keywords = {Design,Human Factors,Human-computer interaction,dialog management,speech and language processing,spoken dialog systems}, mendeley-tags = {Design,Human Factors}, month = jul, number = {3}, pages = {Article 7 (July 2007), 39 pages}, title = {{Promoting extension and reuse in a spoken dialog manager}}, url = {http://portal.acm.org/citation.cfm?doid=1255171.1255173}, volume = {4}, year = {2007} } @inproceedings{Cramer2010, abstract = {Empathy has great potential in human-robot interaction. However, the challenging nature of assessing the user's emotional state points to the importance of also understanding the effects of empathic behaviours incongruent with users' affective experience. A 3x2 between-subject video-based survey experiment (N=133) was conducted with empathic robot behaviour (empathically accurate, neutral, inaccurate) and valence of the situation (positive, negative) as dimensions. Trust decreased when empathic responses were incongruent with the affective state of the user. 
However, in the negative valence condition, reported perceived empathic abilities were greater when the robot responded as if the situation were positive.}, author = {Cramer, Henriette and Goddijn, Jorrit and Wielinga, Bob and Evers, Vanessa}, booktitle = {HRI '10 Proceedings of the 5th ACM/IEEE international conference on Human-robot interaction}, doi = {10.1145/1734454.1734513}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cramer et al. - 2010 - Effects of (in) accurate empathy and situational valence on attitudes towards robots.pdf:pdf}, isbn = {9781424448937}, pages = {141--142}, publisher = {ACM}, title = {{Effects of (in) accurate empathy and situational valence on attitudes towards robots}}, url = {http://dl.acm.org/citation.cfm?id=1734513}, year = {2010} } @article{Hirsh2010, abstract = {Research generally indicates that providers demonstrate modest insight into their clinical decision processes. In a previous study utilizing virtual human (VH) technology, we found that patient demographic characteristics and facial expressions of pain were statistically significant predictors of many nurses' pain-related decisions. The current study examined the correspondence between the statistically identified and self-reported influences of contextual information on pain-related decisions. Fifty-four nurses viewed vignettes containing a video of a VH patient and text describing a postsurgical context. VH sex, race, age, and facial expression varied across vignettes. Participants made pain-assessment and treatment decisions on visual analogue scales. Participants subsequently indicated the information they relied on when making decisions. None of the participants reported using VH sex, race, or age in their decision process. Statistical modeling indicated that 28 to 54\% of participants (depending on the decision) used VH demographic cues. 
76\% of participants demonstrated concordance between their reported and actual use of the VH facial expression cue. Vital signs, text-based clinical summary, and VH movement were also reported as influential factors. These data suggest that biases may be prominent in practitioner decision-making about pain, but that providers have minimal awareness of and/or a lack of willingness to acknowledge this bias. PERSPECTIVE: The current study highlights the complexity of provider decision-making about pain management. The VH technology could be used in future research and education applications aimed at improving the care of all persons in pain.}, author = {Hirsh, Adam T and Jensen, Mark P and Robinson, Michael E}, doi = {10.1016/j.jpain.2009.09.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hirsh, Jensen, Robinson - 2010 - Evaluation of nurses' self-insight into their pain assessment and treatment decisions.pdf:pdf}, issn = {1528-8447}, journal = {The journal of pain : official journal of the American Pain Society}, keywords = {Adult,Age Factors,Computer Simulation,Continental Population Groups,Cues,Facial Expression,Female,Humans,Male,Models, Statistical,Movement,Nurses,Nurses: psychology,Pain,Pain Management,Pain Measurement,Pain: diagnosis,Self-Assessment,Sex Factors,User-Computer Interface}, month = may, number = {5}, pages = {454--61}, pmid = {20015702}, publisher = {Elsevier Ltd}, title = {{Evaluation of nurses' self-insight into their pain assessment and treatment decisions.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2864339\&tool=pmcentrez\&rendertype=abstract}, volume = {11}, year = {2010} } @article{Hunsdahl1967, author = {Hunsdahl, JB}, doi = {10.1016/0022-1910(58)90015-5}, issn = {00221910}, journal = {Journal of the History of the Behavioral}, number = {4}, pages = {298--312}, title = {{Concerning Einf\"{u}hlung (empathy): A concept analysis of its origin and early development}}, 
volume = {2}, year = {1967} } @book{Frith2003, abstract = {The updated edition of this classic account of autism includes a new chapter outlining recent developments in neuropsychological research, and overviews one of the most important theoretical and practical consequences of Frith's original insights into this puzzling condition.Updated edition of this classic account of autism. Includes new sections covering practical and theoretical developments, and a chapter on recent investigations of the neurological basis of psychological impairments in autism. Accessible to a broad general readership.}, author = {Frith, U}, booktitle = {British Journal of Developmental Psychology}, editor = {Blackwell}, keywords = {autism}, pages = {204}, publisher = {Blackwells}, series = {Cognitive Development}, title = {{Autism: Explaining the enigma}}, url = {http://discovery.ucl.ac.uk/75512/}, volume = {21}, year = {2003} } @inproceedings{Dias2005, abstract = {Interactive virtual environments (IVEs) are now seen as an engaging new way by which children learn experimental sciences and other disciplines. These environments are populated by synthetic characters that guide and stimulate the children activities. In order to build such environments, one needs to address the problem of how achieve believable and empathic characters that act autonomously. Inspired by the work of traditional character animators, this paper proposes an architectural model to build autonomous characters where the agent’s reasoning and behaviour is influenced by its emotional state and personality. We performed a small case evaluation in order to determine if the characters evoked empathic reactions in the users with positive results.}, address = {Covilh\~{a}, Portugal}, author = {Dias, J. 
and Paiva, Ana}, booktitle = {EPIA 2005, 12th Portuguese Conference on Artificial Intelligence}, doi = {10.1007/11595014\_13}, editor = {Bento, Carlos and Cardoso, Am\'{\i}lcar and Dias, Ga\"{e}l}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dias, Paiva - 2005 - Feeling and reasoning A computational model for emotional characters(2).pdf:pdf}, pages = {127--140}, publisher = {Springer Berlin / Heidelberg}, title = {{Feeling and reasoning: A computational model for emotional characters}}, url = {http://www.springerlink.com/index/YQ18H62602413554.pdf}, year = {2005} } @article{Ambady1992, abstract = {A meta-analysis was conducted on the accuracy of predictions of various objective outcomes in the areas of social and clinical psychology from short observations of expressive behavior (under 5 min). The overall effect size (r) for the accuracy of predictions for 38 different results was .39. Studies using longer periods of behavioral observation did not yield greater predictive accuracy; predictions based on observations under 1/2 min in length did not differ significantly from predictions based on 4- and 5-min observations. The type of behavioral channel (such as the face, speech, the body, tone of voice) on which the ratings were based was not related to the accuracy of predictions. Accuracy did not vary significantly between behaviors manipulated in a laboratory and more naturally occurring behavior.
Last, effect sizes did not differ significantly for predictions in the areas of clinical psychology, social psychology, and the accuracy of detecting deception.}, author = {Ambady, Nalini and Rosenthal, Robert}, doi = {10.1037/0033-2909.111.2.256}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ambady, Rosenthal - 1992 - Thin slices of expressive behavior as predictors of interpersonal consequences A meta-analysis.pdf:pdf}, issn = {00332909}, journal = {Psychological Bulletin}, number = {2}, pages = {256--274}, publisher = {American Psychological Association}, title = {{Thin slices of expressive behavior as predictors of interpersonal consequences: A meta-analysis.}}, volume = {111}, year = {1992} } @article{Peters2005, abstract = {One of the major problems of user's interaction with Embodied Conversational Agents (ECAs) is to have the conversation last more than few second: after being amused and intrigued by the ECAs, users may find rapidly the restrictions and limitations of the dialog systems, they may perceive the repetition of the ECAs animation, they may find the behaviors of ECAs to be inconsistent and implausible, etc. We believe that some special links, or bonds, have to be established between users and ECAs during interaction. It is our view that showing and/or perceiving interest is the necessary premise to establish a relationship.
In this paper we present a model of an ECA able to establish, maintain and end the conversation based on its perception of the level of interest of its interlocutor.}, author = {Peters, Christopher and Pelachaud, Catherine and Bevacqua, Elisabetta and Mancini, Maurizio and Poggi, Isabella}, doi = {10.1007/11550617\_20}, editor = {Panayiotopoulos, Themis and Gratch, Jonathan and Aylett, Ruth and Ballin, Daniel and Olivier, Patrick and Rist, Thomas}, isbn = {9783540287384}, journal = {Intelligent Virtual Agents}, pages = {229--240}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, title = {{A model of attention and interest using gaze behavior}}, url = {http://www.springerlink.com/index/8gqh2c9phmhb12jd.pdf}, volume = {3661}, year = {2005} } @article{Page2002, abstract = {In the Ultimatum Game, two players are asked to split a prize. The first player, the proposer, makes an offer of how to split the prize. The second player, the responder, either accepts the offer, in which case the prize is split as agreed, or rejects it, in which case neither player receives anything. The rational strategy suggested by classical game theory is for the proposer to offer the smallest possible positive share and for the responder to accept. Humans do not play this way, however, and instead tend to offer 50\% of the prize and to reject offers below 20\%. Here we study the Ultimatum Game in an evolutionary context and show that empathy can lead to the evolution of fairness. 
Empathy means that individuals make offers which they themselves would be prepared to accept.}, author = {Page, Karen M and Nowak, Martin A.}, doi = {10.1006/bulm.2002.0321}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Page, Nowak - 2002 - Empathy leads to fairness.pdf:pdf}, issn = {0092-8240}, journal = {Bulletin of mathematical biology}, keywords = {Biological Evolution,Choice Behavior,Empathy,Games, Experimental,Humans,Models, Psychological,Social Behavior}, month = nov, number = {6}, pages = {1101--1116}, pmid = {12508533}, title = {{Empathy leads to fairness.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12508533}, volume = {64}, year = {2002} } @article{O'Brien2010, author = {O'Brien, Heather L. and Toms, Elaine G.}, doi = {10.1002/asi.21229}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/O'Brien, Toms - 2010 - The development and evaluation of a survey to measure user engagement.pdf:pdf}, journal = {Journal of the American Society for Information Science and Technology}, number = {1}, pages = {50--69}, title = {{The development and evaluation of a survey to measure user engagement}}, url = {http://onlinelibrary.wiley.com/doi/10.1002/asi.21229/full}, volume = {61}, year = {2010} } @article{Heylen2005, abstract = {When involved in face-to-face conversations, people move their heads in typical ways. The pattern of head gestures and their function in conversation has been studied in various disciplines. Many factors are involved in determining the exact patterns that occur in conversation. These can be explained by considering some of the basic properties of face-to-face interactions.
The fact that conversations are a type of joint activity involving social actions together with a few other properties, such as the need for grounding, can explain the variety in functions that are served by the multitude of movements that people display during conversations.}, author = {Heylen, Dirk}, editor = {Halle, L and Wallis, P and Woods, S and Marsella, S and Pelachaud, C and Heylen, Dirk}, isbn = {1902956492}, journal = {Source}, pages = {45--52}, publisher = {The Society for the Study of Artificial Intelligence and the Simulation of Behaviour}, title = {{Challenges Ahead: Head movements and other social acts in conversations}}, url = {http://www.aisb.org.uk/publications/proceedings/aisb05/10\_Virt\_Final.pdf}, year = {2005} } @article{Zajonc1989, author = {Zajonc, R B and Murphy, S T and Inglehart, M}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zajonc, Murphy, Inglehart - 1989 - Feeling and facial efference implications of the vascular theory of emotion.pdf:pdf}, issn = {0033-295X}, journal = {Psychological review}, keywords = {Adult,Brain,Brain: blood supply,Cerebral Arteries,Cerebral Arteries: physiology,Cerebral Veins,Cerebral Veins: physiology,Emotions,Emotions: physiology,Face,Face: blood supply,Facial Expression,Facial Expression: physiology,Humans,Middle Aged,Muscle Contraction,Regional Blood Flow}, month = jul, number = {3}, pages = {395--416}, pmid = {2756066}, title = {{Feeling and facial efference: implications of the vascular theory of emotion.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/2756066}, volume = {96}, year = {1989} } @inproceedings{Krinidis2003, address = {Crete, Greece}, author = {Krinidis, Stelios and Buciu, Ioan and Pitas, Ioannis}, booktitle = {10th International Conference on Human-Computer Interaction (HCI'03)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Krinidis, Buciu, Pitas - 2003 - Facial expression analysis and
synthesis A survey.pdf:pdf}, pages = {22--27}, title = {{Facial expression analysis and synthesis: A survey}}, url = {http://pdf.aminer.org/000/368/466/multiscale\_facial\_expression\_recognition\_using\_convolutional\_neural\_networks.pdf}, year = {2003} } @article{Litvack-Miller1997, abstract = {This study was an investigation of the structure and development of dispositional empathy during middle childhood and its relationship to altruism. A sample of 478 students from 2nd, 4th, and 6th grades completed an altruism questionnaire and a social desirability scale, both created for this study, and the Interpersonal Reactivity Index (Davis, 1980), adapted for this study. Teachers also rated the students on prosocial behaviors, such as sharing. In addition, as an experimental part of the study, the children could make monetary donations and volunteer time to raise funds. Results of a confirmatory factor analysis on the Interpersonal Reactivity Index supported Davis's (1980) findings that empathy comprises four components: perspective taking, fantasy, empathic concern, and personal distress. Factor intercorrelations, however, were not the same as those reported by Davis. MANOVAs were used to examine gender and age effects on empathy. Girls were more empathic in general than boys, and older children showed more empathic concern than younger children. 
Only empathic concern and perspective taking were significant predictors of prosocial behavior.}, author = {Litvack-Miller, W and McDougall, D and Romney, D M}, doi = {10.1037/0022-3514.45.6.1299}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Litvack-Miller, McDougall, Romney - 1997 - The structure of empathy during middle childhood and its relationship to prosocial behavior.pdf:pdf}, issn = {8756-7547}, journal = {Genetic, social, and general psychology monographs}, keywords = {Adolescent,Altruism,Child,Empathy,Female,Humans,Interpersonal Relations,Male,Questionnaires,Social Behavior,Social Desirability}, month = aug, number = {3}, pages = {303--324}, pmid = {9259121}, title = {{The structure of empathy during middle childhood and its relationship to prosocial behavior.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9259121}, volume = {123}, year = {1997} } @article{Rabiner1989, author = {Rabiner, Lawrence R.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rabiner - 1989 - A tutorial on hidden Markov models and selected applications in speech recognition.pdf:pdf}, journal = {Proceedings of the IEEE}, number = {2}, pages = {257--286}, title = {{A tutorial on hidden Markov models and selected applications in speech recognition}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=18626}, volume = {77}, year = {1989} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley.
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @inproceedings{Jennings2000, author = {Jennings, Morgan}, booktitle = {Proceedings of the 2000 ACM SIGCPR conference on Computer Personnel Research}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jennings - 2000 - Theory and models for creating engaging and immersive ecommerce websites.pdf:pdf}, keywords = {aesthetic experience,cognitive aesthetics,flow}, pages = {77--85}, publisher = {ACM}, title = {{Theory and models for creating engaging and immersive ecommerce websites}}, url = {http://dl.acm.org/citation.cfm?id=333358}, year = {2000} } @misc{Elearning-companion.com2013, author = {Elearning-companion.com}, title = {{The Disadvantages of Online Learning}}, url = {http://www.elearning-companion.com}, urldate = {2013-12-17}, year = {2013} } @inproceedings{Samsonovich2006, abstract = {The notion of a human value system can be quantified as a cognitive map, the dimensions of which capture the semantics of concepts and the associated values. This can be done, if one knows (i) how to define the dimensions of the map, and (ii) how to allocate concepts in those dimensions. Regarding the first question, experimental studies with linguistic material using psychometrics have revealed that valence, arousal and dominance are primary dimensions characterizing human values.
The same or similar dimensions are used in popular models of emotions and affects. In these studies, the choice of principal dimensions, as well as scoring concepts, was based on subjective reports or psycho-physiological measurements. Can a cognitive map of human values be constructed without testing human subjects? Here we show that the answer is positive, using generally available dictionaries of synonyms and antonyms. By applying a simple statistical-mechanic model to English and French dictionaries, we constructed multidimensional cognitive maps that capture the semantics of words. We calculated the principal dimensions of the resultant maps and found their semantics consistent across two languages as well as with previously known main cognitive dimensions. These results suggest that the linguistically derived cognitive map of the human value system is language-invariant and, being closely related to psychometrically derived maps, is likely to reflect fundamental aspects of the human mind.}, author = {Samsonovich, A V and Ascoli, G A}, booktitle = {Proc AGI Workshop Advances in Artificial General Intelligence Concepts Architectures and Algorithms}, pages = {111--124}, title = {{Cognitive map dimensions of the human value system extracted from natural language}}, year = {2006} } @article{Sayette2001, abstract = {The Facial Action Coding System (FACS) (Ekman \& Friesen, 1978) is a comprehensive and widely used method of objectively describing facial activity. Little is known, however, about inter-observer reliability in coding the occurrence, intensity, and timing of individual FACS action units. The present study evaluated the reliability of these measures. Observational data came from three independent laboratory studies designed to elicit a wide range of spontaneous expressions of emotion. Emotion challenges included olfactory stimulation, social stress, and cues related to nicotine craving.
Facial behavior was video-recorded and independently scored by two FACS-certified coders. Overall, we found good to excellent reliability for the occurrence, intensity, and timing of individual action units and for corre- sponding measures of more global emotion-specified combinations.}, author = {Sayette, MA and Cohn, Jeffrey F and Wertz, JM}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sayette, Cohn, Wertz - 2001 - A psychometric evaluation of the facial action coding system for assessing spontaneous expression.pdf:pdf}, journal = {Journal of Nonverbal Behavior}, keywords = {FACS,facial expression,reliability}, number = {3}, pages = {167--185}, title = {{A psychometric evaluation of the facial action coding system for assessing spontaneous expression}}, url = {http://www.springerlink.com/index/h6g98m62j8r3up62.pdf}, volume = {25}, year = {2001} } @article{Miller1990, abstract = {WordNet is an on-line lexical reference system whose design is inspired by current psycholinguistic theories of human lexical memory. English nouns, verbs, and adjectives are organized into synonym sets, each representing one underlying lexical concept. Different relations link the synonym sets.}, author = {Miller, George A. and Beckwith, Richard and Fellbaum, Christiane and Gross, Derek and Miller, Katherine J}, doi = {10.1093/ijl/3.4.235}, isbn = {0950384614774577}, issn = {09503846}, journal = {International Journal of Lexicography}, number = {4}, pages = {235--244}, pmid = {15102489}, publisher = {Oxford Univ Press}, title = {{Introduction to WordNet: An On-line Lexical Database}}, url = {http://ijl.oxfordjournals.org/cgi/doi/10.1093/ijl/3.4.235}, volume = {3}, year = {1990} } @book{Arnold1960, address = {New York}, author = {Arnold, M. B}, publisher = {Columbia University Press}, title = {{Emotion and personality}}, year = {1960} } @inproceedings{Amini2013, abstract = {We present HapFACS (ver. 
beta), a new open source software and API for generating FACS-based facial expres- sions on 3D virtual characters that have accompanying lip- synchronized animation abilities. HapFACS has two main usage scenarios: First, with the HapFACS software, users can generate repertoires of realistic FACS-validated facial expressions, either as static images or as videos; Second, with the accessible HapFACS API, users can animate speaking virtual characters with real-time realistic facial expressions, and embed these expressive characters in their own application(s) without any prior experience in computer graphics and modeling. We describe how HapFACS (1) provides control over 49 FACS Action Units at all levels of intensity; (2) enables the animation of faces with a single AU or a composition of AUs, activated unilaterally or bilaterally; and (3) can be applied to any supported character in the underlying 3D-character system1. Finally, we provide details of evaluation experiments we conducted with FACS-certified scorers to validate the facial expressions generated by HapFACS.}, address = {Geneva, Switzerland}, author = {Amini, Reza and Lisetti, Christine}, booktitle = {The fifth biannual Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII 2013)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Amini, Lisetti - 2013 - HapFACS an Open Source API Software to Generate FACS-Based Expressions for ECAs Animation and for Corpus Gener.pdf:pdf}, publisher = {IEEE Computer Society}, title = {{HapFACS : an Open Source API / Software to Generate FACS-Based Expressions for ECAs Animation and for Corpus Generation}}, year = {2013} } @article{Schneier2011, author = {Schneier, B.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Schneier - 2011 - Empathy and Security.pdf:pdf}, journal = {Security \& Privacy, IEEE}, number = {5}, pages = {88--88}, publisher = 
{IEEE}, title = {{Empathy and Security}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6029366}, volume = {9}, year = {2011} } @article{Wallraven2004, address = {New York, New York, USA}, author = {Wallraven, Christian and Cunningham, Douglas W. and Breidt, Martin and B\"{u}lthoff, Heinrich H.}, doi = {10.1145/1012551.1012603}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wallraven et al. - 2004 - View dependence of complex versus simple facial motions.pdf:pdf}, isbn = {1581139144}, journal = {Proceedings of the 1st Symposium on Applied perception in graphics and visualization - APGV '04}, pages = {181}, publisher = {ACM Press}, title = {{View dependence of complex versus simple facial motions}}, url = {http://portal.acm.org/citation.cfm?doid=1012551.1012603}, year = {2004} } @article{Suler2004, abstract = {While online, some people self-disclose or act out more frequently or intensely than they would in person. This article explores six factors that interact with each other in creating this online disinhibition effect: dissociative anonymity, invisibility, asynchronicity, solipsistic introjection, dissociative imagination, and minimization of authority. Personality variables also will influence the extent of this disinhibition. 
Rather than thinking of disinhibition as the revealing of an underlying "true self," we can conceptualize it as a shift to a constellation within self-structure, involving clusters of affect and cognition that differ from the in-person constellation.}, author = {Suler, John}, doi = {10.1089/1094931041291295}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Suler - 2004 - The online disinhibition effect.pdf:pdf}, issn = {1094-9313}, journal = {Cyberpsychology \& behavior : the impact of the Internet, multimedia and virtual reality on behavior and society}, keywords = {Acting Out,Affect,Communication,Dissociative Disorders,Dissociative Disorders: psychology,Humans,Imagination,Individuality,Inhibition (Psychology),Internet,Self Disclosure}, month = jun, number = {3}, pages = {321--6}, pmid = {15257832}, title = {{The online disinhibition effect.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15257832}, volume = {7}, year = {2004} } @article{Silverman2001, abstract = {The goal of this research is to determine whether a computer based training game (HEART-SENSE) can improve recognition of heart attack symptoms and shift behavioral issues so as to reduce pre-hospitalization delay in seeking treatment. Since treatment delay correlates with adverse outcomes, this research could reduce myocardial infarction mortality and morbidity. In Phase I we created and evaluated a prototype virtual village in which users encounter and help convince synthetic personas to deal appropriately with a variety of heart attack scenarios and delay issues. Innovations made here are: (1) a design for a generic simulator package for promoting health behavior shifts, and (2) algorithms for animated pedagogical agents to reason about how their emotional state ties to patient condition and user progress. 
Initial results show that users of the game exhibit a significant shift in intention to call 9-1-1 and avoid delay, that multi-media versions of the game foster vividness and memory retention as well as a better understanding of both symptoms and of the need to manage time during a heart attack event. Also, results provide insight into areas where emotive pedagogical agents help and hinder user performance. Finally, we conclude with next steps that will help improve the game and the field of pedagogical agents and tools for simulated worlds for healthcare education and promotion.}, author = {Silverman, B G and Holmes, John and Kimmel, Stephan and Branas, Charles and Ivins, Doug and Weaver, Ransom and Chen, Yi}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Silverman et al. - 2001 - Modeling emotion and behavior in animated personas to facilitate human behavior change the case of the HEART-S.pdf:pdf}, issn = {1386-9620}, journal = {Health care management science}, keywords = {Algorithms,Behavior Therapy,Computer Simulation,Computer-Assisted Instruction,Emotions,Experimental,Games,Humans,Models,Myocardial Infarction,Myocardial Infarction: diagnosis,Myocardial Infarction: physiopathology,Myocardial Infarction: psychology,Patient Acceptance of Health Care,Patient Acceptance of Health Care: psychology,Psychological,Software,United States}, month = sep, number = {3}, pages = {213--28}, pmid = {11519847}, title = {{Modeling emotion and behavior in animated personas to facilitate human behavior change: the case of the HEART-SENSE game.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11519847}, volume = {4}, year = {2001} } @article{Hogan1969, author = {Hogan, R}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hogan - 1969 - Development of an empathy scale.pdf:pdf}, issn = {0022-006X}, journal = {Journal of consulting and clinical psychology}, keywords = 
{Emotions,Humans,MMPI,Morals,Personality Assessment,Personality Inventory,Social Behavior,Social Perception,Social Values,Socialization}, month = jun, number = {3}, pages = {307--316}, pmid = {4389335}, title = {{Development of an empathy scale.}}, volume = {33}, year = {1969} } @article{Gupta2012, author = {Gupta, Prabodh and Jhala, Darshana and Jhala, Nirag}, doi = {10.1309/AJCPLAE62CRYYXNW}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gupta, Jhala, Jhala - 2002 - Book review.pdf:pdf}, issn = {1943-7722}, journal = {American journal of clinical pathology}, month = jan, number = {1}, pages = {160}, pmid = {22180490}, title = {{Book review.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/22180490}, volume = {137}, year = {2012} } @inproceedings{Bosse2010, abstract = {In order to enhance user involvement in financial services, this paper proposes to combine the idea of adaptive personalisation with intelligent virtual agents. To this end, a computational model for human decision making in financial context is incorporated within an intelligent virtual agent. To test whether the agent enhances user involvement, a web application has been developed, in which users have to make a number of investment decisions. This application has been evaluated in an experiment for a number of participants interacting with the system and afterwards providing their judgement by means of a questionnaire.
The preliminary results indicate that the virtual agent can show appropriate emotional expressions related to states like happiness, greed and fear, and has high potential to enhance user involvement.}, author = {Bosse, Tibor and Siddiqui, Ghazanfar F and Treur, Jan}, booktitle = {IVA'10 Proceedings of the 10th international conference on Intelligent virtual agents}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bosse, Siddiqui, Treur - 2010 - An Intelligent Virtual Agent to Increase Involvement in Financial Services.pdf:pdf}, keywords = {adaptive personalisation.,finance,greed and risk,user involvement}, pages = {378--384}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{An Intelligent Virtual Agent to Increase Involvement in Financial Services}}, year = {2010} } @article{Woods1970, abstract = {The use of augmented transition network grammars for the analysis of natural language sentences is described. Struc- ture-building actions associated with the arcs of the gram- mar network allow for the reordering, restructuring, and copy- ing of constituents necessary to produce deep-structure repre- sentations of the type normally obtained from a transforma- tional analysis, and conditions on the arcs allow for a powerful selectivity which can rule out meaningless analyses and take advantage of semantic information to guide the parsing. The advantages of this model for natural language analysis are discussed in detail and illustrated by examples. 
An imple- mentation of an experimental parsing system for transition network grammars is briefly described.}, author = {Woods, W A}, doi = {10.1145/355598.362773}, editor = {Grosz, Barbara and Jones, Karen and Webber, Bonnie}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Woods - 1970 - Transition Network Grammars for Natural Language Analysis.pdf:pdf}, issn = {00010782}, journal = {Communications of the ACM}, number = {10}, pages = {591--606}, publisher = {ASSOC COMPUTING MACHINERY}, title = {{Transition Network Grammars for Natural Language Analysis}}, url = {http://portal.acm.org/citation.cfm?doid=355598.362773}, volume = {13}, year = {1970} } @inproceedings{Striegnitz2005, abstract = {When humans give route directions, they often use gestures to indicate the location of landmarks. The form of these gestures reflect one of several perspectives that speakers take when producing them. They may locate the landmark with respect to the speaker, with respect to the person following the route, or with respect to other landmarks. A corpus study shows that the perspective chosen is partly determined by the function of the discourse segment these gestures occur in. Since locating gestures are so prevalent in direction-giving, in this paper we address the kinds of dialogue information and knowledge representation that is needed to generate them automatically.}, address = {Delmenhorst, Germany}, author = {Striegnitz, Kristina and Tepper, Paul and Lovett, Andrew and Cassell, Justine}, booktitle = {Workshop on Spatial Language and Dialogue (5th Workshop on Language and Space)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Striegnitz et al. 
- 2005 - Knowledge representation for generating locating gestures in route directions.pdf:pdf}, number = {Section 2}, title = {{Knowledge representation for generating locating gestures in route directions}}, year = {2005} } @article{Poggi2000, abstract = {Our goal is to create an intelligent 3D agent able to send complex, natural messages to users and, in the future, to converse with them. We look at the relationship between the agents communicative intentions and the way that these intentions are expressed into verbal and nonverbal messages. In this paper, we concentrate on the study and generation of coordinated linguistic and gaze communicative acts. In this view we analyse gaze signals according to their functional meaning rather than to their physical actions. We propose a formalism where a communicative act is represented by two elements: a meaning (that corresponds to a set of goals and beliefs that the agent has the purpose to transmit to the interlocutor) and a signal, that is the nonverbal expression of that meaning. We also outline a methodology to generate messages that coordinate verbal with nonverbal signals.}, author = {Poggi, Isabella and Pelachaud, Catherine and {De Rosis}, Fiorella}, issn = {09217126}, journal = {Ai Communications}, number = {3}, pages = {169--181}, publisher = {IOS Press}, title = {{Eye communication in a conversational 3D synthetic agent}}, url = {http://portal.acm.org/citation.cfm?id=1216435.1216439}, volume = {13}, year = {2000} } @article{Varni2009, author = {Varni, Giovanna and Camurri, Antonio and Coletta, Paolo and Volpe, Gualtiero}, doi = {10.1109/CSE.2009.230}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Varni et al. 
- 2009 - Toward a Real-Time Automated Measure of Empathy and Dominance.pdf:pdf}, isbn = {978-1-4244-5334-4}, journal = {2009 International Conference on Computational Science and Engineering}, keywords = {Social signals, music, synchronisation}, pages = {843--848}, publisher = {IEEE}, title = {{Toward a Real-Time Automated Measure of Empathy and Dominance}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5283210}, year = {2009} } @inproceedings{Liu2003, abstract = {This paper presents a novel way for assessing the affective qualities of natural language and a scenario for its use. Previous approaches to textual affect sensing have employed keyword spotting, lexical affinity, statistical methods, and hand-crafted models. This paper demonstrates a new approach, using large-scale real-world knowledge about the inherent affective nature of everyday situations (such as ``getting into a car accident'') to classify sentences into ``basic'' emotion categories. This commonsense approach has new robustness implications. Open Mind Commonsense was used as a real world corpus of 400,000 facts about the everyday world. Four linguistic models are combined for robustness as a society of commonsense-based affect recognition. These models cooperate and compete to classify the affect of text. Such a system that analyzes affective qualities sentence by sentence is of practical value when people want to evaluate the text they are writing. As such, the system is tested in an email writing application.
The results suggest that the approach is robust enough to enable plausible affective text user interfaces.}, address = {New York, New York, USA}, author = {Liu, Hugo and Lieberman, Henry and Selker, Ted}, booktitle = {Proceedings of the 8th international conference on Intelligent user interfaces - IUI '03}, doi = {10.1145/604050.604067}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu, Lieberman, Selker - 2003 - A model of textual affect sensing using real-world knowledge.pdf:pdf}, isbn = {1581135866}, keywords = {Affective computing,Open Mind Commonsense,affective UI,commonsense reasoning,emotions,story understanding}, pages = {125}, publisher = {ACM Press}, title = {{A model of textual affect sensing using real-world knowledge}}, url = {http://portal.acm.org/citation.cfm?doid=604045.604067}, year = {2003} } @inproceedings{Febretti2009, address = {Boston, USA}, author = {Febretti, Alessandro and Garzotto, Franca}, booktitle = {CHI'09 Extended Abstracts on Human Factors in Computing Systems}, doi = {10.1145/1520340.1520618}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Febretti, Garzotto - 2009 - Usability, playability, and long-term engagement in computer games.pdf:pdf}, keywords = {acm classification keywords,correlation,engagement,heuristic evaluation,interactive game,playability,usability,user testing}, pages = {4063--4068}, publisher = {ACM}, title = {{Usability, playability, and long-term engagement in computer games}}, url = {http://dl.acm.org/citation.cfm?id=1520618}, year = {2009} } @article{Davidson1986, author = {Davidson, R and Raistrick, D}, journal = {British journal of addiction}, keywords = {adolescent,adult,age factors,aged,alcoholism,humans,middle aged,questionnaires,self disclosure}, number = {2}, pages = {217--222}, pmid = {3458489}, title = {{The validity of the Short Alcohol Dependence Data (SADD) Questionnaire: a short self-report 
questionnaire for the assessment of alcohol dependence.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/3458489}, volume = {81}, year = {1986} } @article{VanDerSchalk2011, abstract = {We report two studies validating a new standardized set of filmed emotion expressions, the Amsterdam Dynamic Facial Expression Set (ADFES). The ADFES is distinct from existing datasets in that it includes a face-forward version and two different head-turning versions (faces turning toward and away from viewers), North-European as well as Mediterranean models (male and female), and nine discrete emotions (joy, anger, fear, sadness, surprise, disgust, contempt, pride, and embarrassment). Study 1 showed that the ADFES received excellent recognition scores. Recognition was affected by social categorization of the model: displays of North-European models were better recognized by Dutch participants, suggesting an ingroup advantage. Head-turning did not affect recognition accuracy. Study 2 showed that participants more strongly perceived themselves to be the cause of the other's emotion when the model's face turned toward the respondents. The ADFES provides new avenues for research on emotion expression and is available for researchers upon request. 
(PsycINFO Database Record (c) 2011 APA, all rights reserved).}, author = {{Van Der Schalk}, Job and Hawk, Skyler T and Fischer, Agneta H and Doosje, Bertjan}, doi = {10.1037/a0023853}, institution = {Department of Social Psychology.}, journal = {Emotion}, keywords = {adolescent,adult,arousal,emotional intelligence,emotions,facial expression,female,head movements,humans,male,photic stimulation,psychological techniques,psychological techniques standards,recognition (psychology),reproducibility results,young adult}, number = {4}, pages = {907--920}, title = {{Moving faces, looking places: Validation of the Amsterdam Dynamic Facial Expression Set (ADFES).}}, url = {http://dx.doi.org/10.1037/a0023853}, volume = {11}, year = {2011} } @article{Luneski2008, abstract = {The area of affective computing has received significant attention by the research community over the last few years. In this paper we review the underlying principles in the field, in an effort to draw threads for possible future development within medical informatics. The approach is lead by considering the three main affective channels, namely, visual, audio/speech, and physiological in relation to e-health, emotional intelligence and e-learning. A discussion on the importance of past and present applications together with a prediction on future literature output is also provided.}, author = {Luneski, Andrej and Bamidis, Panagiotis D.
and Hitoglou-Antoniadou, Magda}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Luneski, Bamidis, Hitoglou-Antoniadou - 2008 - Affective Computing and Medical Informatics State Of The Art in Emotion-Aware Medical App.pdf:pdf}, issn = {0926-9630}, journal = {Studies in Health Technology and Informatics}, keywords = {Artificial Intelligence,Emotions,Humans,Medical Informatics Computing,Software,User-Computer Interface}, month = jan, pages = {517--522}, pmid = {18487783}, title = {{Affective Computing and Medical Informatics: State Of The Art in Emotion-Aware Medical Applications}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18487783 http://www.hst.aau.dk/~ska/MIE2008/ParalleSessions/PapersForDownloads/05.HCI\%26IM/SHTI136-0517.pdf}, volume = {136}, year = {2008} } @article{Koolagudi2012, author = {Koolagudi, Shashidhar G. and Rao, K.
Sreenivasa}, doi = {10.1007/s10772-011-9125-1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Koolagudi, Rao - 2012 - Emotion recognition from speech a review.pdf:pdf}, issn = {1381-2416}, journal = {International Journal of Speech Technology}, keywords = {classification models,corpus,elicited speech corpus,emotion recognition,excitation source features,natural speech,prosodic features,simulated emotional,speech corpus,system features}, month = jan, number = {2}, pages = {99--117}, title = {{Emotion recognition from speech: a review}}, url = {http://link.springer.com/10.1007/s10772-011-9125-1}, volume = {15}, year = {2012} } @inproceedings{Lopez2007, abstract = {In this paper we present validation tests that we have carried out on gestures that we have designed for an embodied conver- sational agent (ECAs), to assess their soundness with a view to applying said gestures in a forthcoming experiment to explore the possibilities ECAs can offer to overcome typical robustness problems in spoken language dialogue systems (SLDSs). The paper is divided into two parts: First we carry our a literature review to acquire a sense of the extent to which ECAs can help overcome user frustration during human-machine interaction. Then we associate tentative, yet specific, ECA gestural behaviour with each of the main dialogue stages, with special emphasis on problem situations. In the second part we describe the tests we have carried out to validate our ECA’s gestural repertoire. The results obtained show that users generally understand and naturally accept the ges- tures, to a reasonable degree. 
This encour- ages us to proceed with the next stage of research: evaluating the gestural strategy in real dialogue situations with the aim of learning about how to favour a more effi- cient and pleasant dialogue flow for the us- ers.}, address = {Prague, Czech Republic}, author = {L\'{o}pez, Beatriz and Hern\'{a}ndez, \'{A}lvaro and D\'{\i}az, David and Fern\'{a}ndez, Rub\'{e}n and Hern\'{a}ndez, Luis and Torre, Doroteo}, booktitle = {Proceedings of the Workshop on Embodied Language Processing}, doi = {10.3115/1610065.1610074}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/L\'{o}pez et al. - 2007 - Design and validation of ECA gestures to improve dialogue system robustness.pdf:pdf}, pages = {67--74}, publisher = {Association for Computational Linguistics}, title = {{Design and validation of ECA gestures to improve dialogue system robustness}}, url = {http://portal.acm.org/citation.cfm?doid=1610065.1610074}, year = {2007} } @phdthesis{Boukricha2013, author = {Boukricha, Hana}, school = {Bielefeld: Bielefeld University}, title = {{Simulating empathy in virtual humans}}, type = {Ph.D. Thesis}, year = {2013} } @article{Butow1997, abstract = {BACKGROUND: While the importance of providing individualised communication to cancer patients is now well recognised, little is known about the stability and validity of patients' expressed preferences for information and involvement in decision-making. This study explored the stability and possible predictors of such preferences over time. PATIENTS AND METHODS: Cancer patients seeing two Medical Oncologists in an out-patient clinic at an Australian teaching hospital completed a questionnaire battery before and directly after one consultation, and before their next consultation. Eighty consecutive patients with heterogeneous cancers participated in the study. Preferences for general and specific information, involvement and support were elicited at each assessment. 
Locus of control and patient familiarity with the clinic were measured before the first consultation. Patient satisfaction with the consultation was assessed directly after the consultation. Demographic and disease data were recorded for each patient. RESULTS: General preferences for information and involvement were relatively stable, at least in the short term; however there was considerable variability in preferences for specific topics of information. Patients whose condition had recently worsened were more likely to want progressively less involvement in decision-making. Gender, the doctor seen and religion were also predictive of patient preferences. CONCLUSIONS: Situational factors, such as change in disease status, may alter a patient's preferences for information and involvement. If we wish to match the provision of information and support to the expressed needs of patients, we must ask patients at each consultation what those needs are.}, author = {Butow, P N and Maclean, M and Dunn, S M and Tattersall, M H and Boyer, M J}, institution = {Department of Medicine, University of Sydney, New South Wales, Australia.}, journal = {Annals of oncology official journal of the European Society for Medical Oncology ESMO}, number = {9}, pages = {857--863}, pmid = {9358935}, title = {{The dynamics of change: cancer patients' preferences for information, involvement and support.}}, url = {http://annonc.oxfordjournals.org/cgi/content/abstract/8/9/857}, volume = {8}, year = {1997} } @article{Squier1990, abstract = {Empathic understanding in practitioner relationships is postulated as necessary for adherence to therapeutic regimens. It is considered to be one of the most important practitioner relationship skills leading ultimately to patient health benefit. 
Research literature from a wide-range of health disciplines including personality theory, social psychology, psychotherapy, psycho-analysis, and practitioner-patient communication highlights the key role of empathic processes in personal health care. A model of empathic understanding is described which attempts to integrate the substantive findings in the research literature and seeks to generate new ideas for further investigation. The model addresses theoretical relationships between practitioners' empathic understanding, patients' knowledge of their illness and motivation to get better, adherence to treatment advice, and outcome. Recent work on the selection and training of medical and nursing staff in empathic skills is reviewed. A number of areas for future research are outlined including the effect of individual practitioner differences in the components of empathy, empathic compatibility in practitioner-patient dyads, fluctuations in levels of practitioner empathy during long-term care, specific practitioner behaviours which communicate empathy, and the relationship between factors of patient satisfaction and the perception of empathic understanding.}, author = {Squier, Roger W}, issn = {02779536}, journal = {Social Science \& Medicine}, keywords = {adherence,empathy,patient satisfaction,practitioner patient relationships}, number = {3}, pages = {325--339}, title = {{A model of empathic understanding and adherence to treatment regimens in practitioner-patient relationships}}, url = {http://www.sciencedirect.com/science/article/B6VBF-466KMV9-HP/2/fe5f8570e4886876f22bbc3f8c5ce6a2}, volume = {30}, year = {1990} } @article{DiMatteo1980, abstract = {The relationship between physicians' nonverbal communication skills (their ability to communicate and to understand facial expression, body movement and voice tone cues to emotion) and their patients' satisfaction with medical care was examined in 2 studies. 
The research involved 71 residents in internal medicine and 462 of their ambulatory and hospitalized patients. Standardized, reliable and valid measures of nonverbal communication skills were administered to the physicians. Their scores on these tests were correlated with ratings they received from a sample of their patients on measures of satisfaction with the technical aspects and the socioemotional aspects (or art) of the medical care they received. While the nonverbal communication skills of the physicians bore little relationship to patients' ratings of the technical quality of care, measures of these skills did predict patient satisfaction with the art of medical care received. Across both samples, physicians who were more sensitive to body movement and posture cues to emotion (the channel suggested by nonverbal researchers as the one in which true affect can be perceived) received higher ratings from their patients on the art of care than did less sensitive physicians. In addition, physicians who were successful at expressing emotion through their nonverbal communications tended to receive higher ratings from patients on the art of care than did physicians who were less effective communicators. The implications of successfully identifying characteristics of physicians with whom patients are satisfied are discussed.}, author = {DiMatteo, M R and Taranta, A and Friedman, H S and Prince, L M}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/DiMatteo et al. 
- 1980 - Predicting patient satisfaction from physicians' nonverbal communication skills.pdf:pdf},
  issn = {0025-7079},
  journal = {Medical care},
  keywords = {Adult,Consumer Satisfaction,Evaluation Studies as Topic,Female,Humans,Male,Nonverbal Communication,Physician-Patient Relations},
  month = apr,
  number = {4},
  pages = {376--387},
  pmid = {7401698},
  title = {{Predicting patient satisfaction from physicians' nonverbal communication skills}},
  url = {http://www.ncbi.nlm.nih.gov/pubmed/7401698},
  volume = {18},
  year = {1980}
}
@inproceedings{Lien1998,
  author = {Lien, James J. and Cohn, Jeffrey F. and Kanade, Takeo and Li, Ching-Chung},
  booktitle = {Proceedings of the Third IEEE International Conference on Automatic Face and Gesture Recognition (FG '98)},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lien et al. - 1998 - Automated Facial Expression Recognition Based on FACS Action Units University of Pittsburgh Takeo Kanade Vision and.pdf:pdf},
  publisher = {IEEE},
  title = {{Automated Facial Expression Recognition Based on FACS Action Units}},
  year = {1998}
}
@inproceedings{Heerink2009,
  author = {Heerink, Marcel and Kr{\"o}se, Ben and Evers, Vanessa and Wielinga, Bob},
  booktitle = {Robot and Human Interactive Communication, 2009. RO-MAN 2009. The 18th IEEE International Symposium on},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Heerink et al. - 2009 - Measuring acceptance of an assistive social robot a suggested toolkit.pdf:pdf},
  pages = {528--533},
  publisher = {IEEE},
  title = {{Measuring acceptance of an assistive social robot: a suggested toolkit}},
  url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5326320},
  year = {2009}
}
@misc{Wiley1996,
  abstract = {A tactile interface system which provides a method for entering user position into a virtual reality and providing tactile feedback from that virtual reality.
This system will allow for the representation of the user limb position and enable the user to feel interactions with objects represented in the virtual reality. The system also provides for the recording of tactile information to accompany audio and visual recordings.}, author = {Wiley, Jack W. and Shaw, Christopher D.}, title = {{Tactile Interface Apparatus For Providing Physical Feedback To A User Based On An Interaction With A Virtual Environment}}, year = {1996} } @inproceedings{Navalpakkam2012, address = {Austin, Texas}, author = {Navalpakkam, Vidhya and Churchill, E}, booktitle = {Proceedings of ACM Conference on Human Factors in Computing Systems (SIGCHI)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Navalpakkam, Churchill - 2012 - Mouse tracking measuring and predicting users' experience of web-based content.pdf:pdf}, isbn = {9781450310154}, pages = {2963--2972}, publisher = {ACM}, title = {{Mouse tracking: measuring and predicting users' experience of web-based content}}, url = {http://dl.acm.org/citation.cfm?id=2208705}, year = {2012} } @inproceedings{Ochs2008, abstract = {Recent research has shown that virtual agents expressing empathic emotions toward users have the potentiality to en- hance human-machine interaction. To identify under which circumstances a virtual agent should express empathic emo- tions, we have analyzed real human-machine dialog situa- tions that have led users to express emotion. The results of this empirical study have been combined with theoretical descriptions of emotions to construct a model of empathic emotions. Based on this model, a module of emotions has been implemented as a plug-in for JSA agents. It determines the empathic emotions (their types and intensity) of such agents in real time. It has been used to develop a demon- strator where users can interact with an empathic dialog agent to obtain information on their emails. 
An evaluation of this agent has enabled us to both validate the proposed model of empathic emotions and highlight the positive user’s perception of the virtual agent.}, address = {Estoril, Portugal}, author = {Ochs, Magalie and Pelachaud, Catherine and Sadek, David}, booktitle = {Proceeding of 7th International Conference on Autonomous Agents and Multiagent Systems (AAMAS2008)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ochs, Pelachaud, Sadek - 2008 - An empathic virtual dialog agent to improve human-machine interaction.pdf:pdf}, pages = {89--96}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{An empathic virtual dialog agent to improve human-machine interaction}}, url = {http://jmvidal.cse.sc.edu/library/AAMAS-08/proceedings/pdf/paper/AAMAS08\_0526.pdf}, year = {2008} } @article{Noller1985, abstract = {This paper reviews the literature on the complex question of the relative importance of the verbal, visual and vocal channels in various types of judgments. It is noted that a wide variety of methodologies are used in such research with studies differing in terms of the type of stimuli used (varying on the dimension of stylised to naturally occurring), the task required of the subjects (particularly varying on the cognitive-affective dimension) and the method used to assess the relative importance of the channels. An attempt is made to assess the important variables which affect the way the various channels are used by decoders, including whether deception is involved or expected, whether the message is discrepant, the particular judgment being made and the dimension on which the stimulus varies, the sex of the encoder and the decoder and the relationship between them, and the age of the decoder. 
The possibility of other related variables also acting as moderators is discussed.},
  author = {Noller, Patricia},
  doi = {10.1007/BF00987557},
  issn = {01915886},
  journal = {Journal of Nonverbal Behavior},
  number = {1},
  pages = {28--47},
  publisher = {Springer Netherlands},
  title = {{Video primacy? A further look}},
  url = {http://www.springerlink.com/index/10.1007/BF00987557},
  volume = {9},
  year = {1985}
}
@article{Pantic2000,
  author = {Pantic, Maja and Rothkrantz, Leon J. M.},
  doi = {10.1109/34.895976},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pantic, Member, Rothkrantz - 2000 - Automatic Analysis of Facial Expressions The State of the Art.pdf:pdf},
  journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  number = {12},
  pages = {1424--1445},
  title = {{Automatic Analysis of Facial Expressions: The State of the Art}},
  volume = {22},
  year = {2000}
}
@article{Chovil1991,
  abstract = {The article focuses on facial expressions exhibited during conversation. Although facial displays are undoubtedly used at times to convey information about how a person is feeling or reacting, emotion displays do not account for the majority of displays that occur. A number of researchers have noted the use of facial displays in talk. Some facial displays serve linguistic functions. They could be used to mark out emphasis and other aspects of linguistic structure, serve as supplements to speech, or as listener commentaries. Brow movements serve as conversational signals. There are a number of ways that are used by both speakers and listeners to convey information in conversation. Observation of the use of facial displays in language has not been limited to spoken language. Some facial displays were found to serve linguistic functions for users of American Sign Language.
Facial displays helped to mark introductions of topics, clauses, questions, and other syntactic constructions.}, author = {Chovil, Nicole}, journal = {Research on Language and Social Interaction}, keywords = {conversation,dialogue,expressions,facial,gestures}, pages = {163--194}, title = {{Discourse-Oriented Facial Displays in Conversation}}, volume = {25}, year = {1991} } @inproceedings{Thomaz2005, abstract = {Social referencing is the tendency to use the emotional reaction of another to help form one's own affective appraisal of a novel situation, which is then used to guide subsequent behavior. It is an important form of emotional communication and is a developmental milestone for human infants in their ability to learn about their environment through social means. In this paper, we present a biologically-inspired computational model of social referencing for our expressive, anthropomorphic robot that consists of three interacting systems: emotional empathy through facial imitation, a shared attention mechanism, and an affective memory system. This model presents opportunities for understanding how these mechanisms might interact to enable social referencing behavior in humans.}, author = {Thomaz, Andrea Lockerd and Berlin, Matt and Breazeal, Cynthia}, booktitle = {IEEE International Workshop on Robot and Human Interactive Communication (ROMAN 2005)}, doi = {10.1109/ROMAN.2005.1513844}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Thomaz, Berlin, Breazeal - 2005 - An Embodied Computational Model of Social Referencing Inspiration from Human Infants.pdf:pdf}, pages = {591 -- 598}, publisher = {IEEE}, title = {{An Embodied Computational Model of Social Referencing Inspiration from Human Infants}}, year = {2005} } @article{Goeleven2008, author = {Goeleven, E. and de Raedt, R. and Leyman, L. 
and Verschuere, B.},
  doi = {10.1080/02699930701626582},
  journal = {Cognition and Emotion},
  number = {6},
  pages = {1094--1118},
  title = {{The Karolinska Directed Emotional Faces: A validation study}},
  volume = {22},
  year = {2008}
}
@article{Macdorman2010,
  author = {MacDorman, Karl F. and Coram, Joseph A.},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Macdorman, Coram - 2010 - Gender Differences in the Impact of Presentational Factors in Human Character Animation on.pdf:pdf},
  journal = {Presence: Teleoperators and Virtual Environments},
  number = {3},
  pages = {213--229},
  title = {{Gender Differences in the Impact of Presentational Factors in Human Character Animation on Decisions in Ethical Dilemmas}},
  volume = {19},
  year = {2010}
}
@incollection{Maaoui2010,
  author = {Maaoui, C. and Pruski, A.},
  booktitle = {Cutting Edge Robotics},
  chapter = {20},
  editor = {Kordic, Vedran},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Maaoui, Pruski - 2010 - Emotion recognition through physiological signals for human-machine communication.pdf:pdf},
  isbn = {978-953-307-062-9},
  pages = {317--333},
  publisher = {InTech},
  title = {{Emotion recognition through physiological signals for human-machine communication}},
  url =
{http://www.intechopen.com/source/pdfs/12200/InTech-Emotion\_recognition\_through\_physiological\_signals\_for\_human\_machine\_communication.pdf}, year = {2010} } @article{DeRosis2003, abstract = {This paper describes the results of a research project aimed at implementing a 'realistic' 3D Embodied Agent that can be animated in real-time and is 'believable and expressive': that is, able to coherently communicate complex information through the combination and the tight synchronisation of verbal and nonverbal signals. We describe, in particular, how we 'animate' this Agent (that we called Greta) so as to enable her to manifest the affective states that are dynamically activated and de-activated in her mind during the dialog with the user. The system is made up of three tightly interrelated components: A representation of the Agent Mind: this includes long and short-term affective components (personality and emotions) and simulates how emotions are triggered and decay over time according to the Agent's personality and to the context, and how several emotions may overlap. Dynamic belief networks with weighting of goals is the formalism we employ to this purpose. A mark-up language to denote the communicative meanings that may be associated with dialog moves performed by the Agent. A translation of the Agent's tagged move into a face expression, that combines appropriately the available channels (gaze direction, eyebrow shape, head direction and movement etc). The final output is a 3-D facial model that respects the MPEG-4 standard and uses MPEG-4 Facial Animation Parameters to produce facial expressions. Throughout the paper, we illustrate the results obtained, with an example of dialog in the domain of 'Advice about eating disorders'. The paper concludes with an analysis of advantages of our cognitive model of emotion triggering and of the problems found in testing it. 
Although we did not yet complete a formal evaluation of our system, we briefly describe how we plan to assess the agent's believability in terms of consistency of its communicative behaviour. (C) 2003 Elsevier Science Ltd. All rights reserved.}, author = {{De Rosis}, F and Pelachaud, Catherine and Poggi, Isabella and Carofiglio, V and {De Carolis}, Berardina}, doi = {10.1016/S1071-5819(03)00020-X}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, number = {1-2}, pages = {81--118}, title = {{From Greta's mind to her face: modelling the dynamics of affective states in a conversational embodied agent}}, volume = {59}, year = {2003} } @incollection{Cooper2000, abstract = {This paper considers how research into empathy in teaching and learning can inform the research into intelligent systems and intelligent agents embedded in educational applications. It also relates this research to some analysis of classroom practice completed as part of the EU funded NIMIS project. The project is developing three applications, one of which aims to support writing development with young children aged 5-6 years based on a cartoon format. The NIMIS classroom as a whole is designed to enhance and augment existing classroom practices and to foster collaboration by non-intrusive hardware and intuitive hardware and software interfaces. To this end it seeks to enhance both human and electronic communication in the classroom. Empathy is central to ensuring the quality of human communication and personal development. 
This paper suggests that intelligent systems that can consider more carefully the processes and feelings involved in human interactions in teaching and learning, may promote higher quality support for students in classrooms.}, author = {Cooper, Bridget and Brna, Paul and Martins, Alex}, booktitle = {Affective Interactions Towards a New Generation of Computer Interfaces}, doi = {10.1007/10720296\_3}, editor = {Paiva, Ana}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cooper, Brna, Martins - 2000 - Effective affective in intelligent systems–building on evidence of empathy in teaching and learning.pdf:pdf}, isbn = {978-3-540-41520-6}, pages = {21--34}, publisher = {Springer Berlin / Heidelberg}, title = {{Effective affective in intelligent systems–building on evidence of empathy in teaching and learning}}, url = {http://www.springerlink.com/index/j8v0l230t3503367.pdf}, volume = {1814/2000}, year = {2000} } @inproceedings{Amini2013b, abstract = {In this paper, we discuss a novel approach for the computer-delivery of Brief Motivational Interventions (BMIs) for health behavior change. We describe the basic elements of our system architecture, and focus on enabling a multimodal Embodied Conversational Agent (ECA) to deliver the health behavior change interventions empathetically by adapting, in real- time, its verbal and non-verbal communication messages to those of its clients. The designed empathy model integrates a cognitive component and an affective components. We then discuss the evaluation experiment that we designed and conducted to evaluate the impact of empathy model on users’ experience with the empathic character. 
Results indicate that, in comparison with the non-empathic counselor, the empathic one is better accepted (e.g., more enjoyable, empathizing, engaging, and likable) and some users might be willing to disclose more private information (e.g., drinking habits) to the counselor endowed with empathic abilities than the one without.}, address = {Philadelphia}, author = {Amini, Reza and Lisetti, Christine and Yasavur, Ugan and Rishe, Naphtali}, booktitle = {IEEE International Conference on Healthcare Informatics 2013 (ICHI 2013)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Amini et al. - 2013 - On-Demand Virtual Health Counselor for Delivering Behavior-Change Health Interventions(2).pdf:pdf}, number = {1}, publisher = {IEEE}, title = {{On-Demand Virtual Health Counselor for Delivering Behavior-Change Health Interventions}}, year = {2013} } @misc{Shaw2000, abstract = {Methods and apparatuses described herein automate and confer additive properties to morphs (modification of a starting graphical image to a destination graphical image). The enhanced automated additive morphs created by this invention extend the currently limited scope of animation techniques, creating: moving morphs, where characters can speak, move, and emote during the morphing process; parametric character creation, where features can be sequentially added to a character to create a wide variety of resulting characters; behavioral transference, where character behavior can be automatically transferred to newly created characters, and behavioral layering whereby sequential behavior patterns can be concurrently transferred or imparted to a character. The present invention allows an animator to create, animate, control and transform two and three dimensional images instantaneously and fluidly. 
The invention provides a superior solution at significantly less cost which extends the range and properties of existing state of the art animation.}, author = {Shaw, Christopher D. and Wilson, Orion}, title = {{Methods and apparatuses for controlling transformation of two and three-dimensional images}}, year = {2000} } @article{Banziger2009, abstract = {Emotion recognition ability has been identified as a central component of emotional competence. We describe the development of an instrument that objectively measures this ability on the basis of actor portrayals of dynamic expressions of 10 emotions (2 variants each for 5 emotion families), operationalized as recognition accuracy in 4 presentation modes combining the visual and auditory sense modalities (audio/video, audio only, video only, still picture). Data from a large validation study, including construct validation using related tests (Profile of Nonverbal Sensitivity; Rosenthal, Hall, DiMatteo, Rogers, \& Archer, 1979; Japanese and Caucasian Facial Expressions of Emotion; Biehl et al., 1997; Diagnostic Analysis of Nonverbal Accuracy; Nowicki \& Duke, 1994; Emotion Recognition Index; Scherer \& Scherer, 2008), are reported. The results show the utility of a test designed to measure both coarse and fine-grained emotion differentiation and modality-specific skills. 
Factor analysis of the data suggests 2 separate abilities, visual and auditory recognition, which seem to be largely independent of personality dispositions.},
  author = {B{\"a}nziger, Tanja and Grandjean, Didier and Scherer, Klaus R.},
  doi = {10.1037/a0017088},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/B\"{a}nziger, Grandjean, Scherer - 2009 - Emotion recognition from expressions in face, voice, and body the Multimodal Emotion Recognition T.pdf:pdf},
  issn = {1931-1516},
  journal = {Emotion (Washington, D.C.)},
  keywords = {Adolescent,Adult,Discrimination (Psychology),Emotional Intelligence,Emotions,Facial Expression,Female,Humans,Male,Nonverbal Communication,Pattern Recognition,Personality Inventory,Personality Inventory: statistics \& numerical data,Psychometrics,Psychometrics: statistics \& numerical data,Recognition (Psychology),Reproducibility of Results,Social Adjustment,Visual,Voice Quality,Young Adult},
  month = oct,
  number = {5},
  pages = {691--704},
  pmid = {19803591},
  title = {{Emotion recognition from expressions in face, voice, and body: the Multimodal Emotion Recognition Test (MERT).}},
  url = {http://www.ncbi.nlm.nih.gov/pubmed/19803591},
  volume = {9},
  year = {2009}
}
@article{Steunebrink2011,
  author = {Steunebrink, Bas R. and Dastani, Mehdi and Meyer, John-Jules Ch.},
  doi = {10.1007/s11229-011-0004-8},
  file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Steunebrink, Dastani, Meyer - 2011 - A formal model of emotion triggers an approach for BDI agents.pdf:pdf},
  issn = {0039-7857},
  journal = {Synthese},
  keywords = {cognitive modeling,intelligent agents,logic of emotions},
  month = sep,
  number = {S1},
  pages = {83--129},
  title = {{A formal model of emotion triggers: an approach for BDI agents}},
  url = {http://www.springerlink.com/index/10.1007/s11229-011-0004-8},
  volume = {185},
  year = {2011}
}
@article{Lakin2003b,
  author = {Lakin, J. L. and Chartrand, T.
L.}, doi = {10.1111/1467-9280.14481}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lakin, Chartrand - 2003 - Using Nonconscious Behavioral Mimicry to Create Affiliation and Rapport.pdf:pdf}, issn = {0956-7976}, journal = {Psychological Science}, month = jul, number = {4}, pages = {334--339}, title = {{Using Nonconscious Behavioral Mimicry to Create Affiliation and Rapport}}, url = {http://pss.sagepub.com/lookup/doi/10.1111/1467-9280.14481}, volume = {14}, year = {2003} } @inproceedings{Paiva2004a, author = {Paiva, Ana and Dias, J. and Sobral, D. and Woods, S. and Hall, Lynne}, booktitle = {Workshop on Empathic Agents, AAMAS’04}, title = {{Building empathic life-like characters: the proximity factor}}, year = {2004} } @inproceedings{Huang2010, abstract = {Virtual humans are embodied software agents that should not only be realistic looking but also have natural and realistic behaviors. Traditional virtual human systems learn these interaction behaviors by observing how individuals respond in face-to-face situations (i.e., direct interaction). In contrast, this paper introduces a novel methodological approach called parasocial consensus sampling (PCS) which allows multiple individuals to vicariously experience the same situation to gain insight on the typical (i.e., consensus view) of human responses in social interaction. This approach can help tease apart what is idiosyncratic from what is essential and help reveal the strength of cues that elicit social responses. Our PCS approach has several advantages over traditional methods: (1) it integrates data from multiple independent listeners interacting with the same speaker, (2) it associates probability of how likely feedback will be given over time, (3) it can be used as a prior to analyze and understand the face-to-face interaction data, (4) it facilitates much quicker and cheaper data collection. 
In this paper, we apply our PCS approach to learn a predictive model of listener backchannel feedback. Our experiments demonstrate that a virtual human driven by our PCS approach creates significantly more rapport and is perceived as more believable than the virtual human driven by face-to-face interaction data.}, address = {Toronto, Canada}, author = {Huang, Lixing and Morency, Louis-Philippe and Gratch, Jonathan}, booktitle = {9th International Conference on Autonomous Agents and Multiagent Systems (AAMAS'2010)}, doi = {10.1145/1838206.1838371}, editor = {Hoek, Van Der and Kaminka and Lesperance and Luck and Sen}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Huang, Morency, Gratch - 2010 - Parasocial consensus sampling combining multiple perspectives to learn virtual human behavior.pdf:pdf}, keywords = {Backchannel Feedback,Parasocial,Rapport,Virtual Humans}, number = {Aamas}, pages = {10--14}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{Parasocial consensus sampling: combining multiple perspectives to learn virtual human behavior}}, url = {http://dl.acm.org/citation.cfm?id=1838371}, year = {2010} } @article{Oatley1987, abstract = {A theory is proposed that emotions are cognitively based states which co-ordinate quasi-autonomous processes in the nervous system. Emotions provide a biological solution to certain problems of transition between plans, in systems with multiple goals. Their function is to accomplish and maintain these transitions, and to communicate them to ourselves and others. Transitions occur at significant junctures of plans when the evaluation of success in a plan changes. 
Complex emotions are derived from a small number of basic emotions and arise at junctures of social plans.}, author = {Oatley, Keith and Johnson-laird, P N}, doi = {10.1080/02699938708408362}, isbn = {0269993114640600}, issn = {02699931}, journal = {Cognition \& Emotion}, number = {1}, pages = {29--50}, publisher = {Psychology Press}, title = {{Towards a Cognitive Theory of Emotions}}, url = {http://www.tandfonline.com/doi/abs/10.1080/02699938708408362?journalCode=pcem20}, volume = {1}, year = {1987} } @article{Shields2005, abstract = {OBJECTIVES: To develop a reliable and valid computer coded measure to assess emotional expression from transcripts of physician-patient interactions. METHODS: Physician encounters with two standardized patients (SPs) were audiotaped. Fifty patients from each physician (n = 100 primary care physicians) completed surveys that assessed patients' perceptions of their relationships with physicians. Audio-recordings of 193 patient-physician encounters were transcribed and computer-coded to derive a percent emotion words, and research assistants completed the Measure of Patient-Centered Communication (MPCC). RESULTS: After adjustment for potential confounders, regression analyses revealed physicians' use of emotion words and the MPCC contribute independently to patients' and SPs' perceptions of their relationship with physicians. CONCLUSIONS: The computerized coding of emotion words shows promise as a reliable, valid, and simple method to code transcript data of physician-patient interactions. 
This method may be expanded to examine other aspects of physician language and does not require coder training.}, author = {Shields, Cleveland G and Epstein, Ronald M and Franks, Peter and Fiscella, Kevin and Duberstein, Paul and McDaniel, Susan H and Meldrum, Sean}, institution = {Department of Family Medicine, University of Rochester Medical Center, Rochester Center to Improve Communication in Health Care, 1381 South Avenue, Rochester, NY 14620-2830, USA. Cleveland\_Shields@URMC.Rochester.edu}, journal = {Patient Education and Counseling}, number = {2}, pages = {232--238}, pmid = {15911198}, publisher = {Elsevier}, title = {{Emotion language in primary care encounters: reliability and validity of an emotion word count coding system.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15911198}, volume = {57}, year = {2005} } @inproceedings{Said2004, address = {New York, New York, USA}, author = {Said, Norma S.}, booktitle = {Proceeding of the 2004 conference on Interaction design and children building a community - IDC '04}, doi = {10.1145/1017833.1017873}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Said - 2004 - An engaging multimedia design model.pdf:pdf}, isbn = {1581137915}, keywords = {construct,designing for children,engagement,engaging factor for children,engaging multimedia,interaction types,interactivity,multimedia and children,simulation}, pages = {169--172}, publisher = {ACM Press}, title = {{An engaging multimedia design model}}, url = {http://portal.acm.org/citation.cfm?doid=1017833.1017873}, year = {2004} } @article{Jackson1992, abstract = {OBJECTIVE: The purpose of this paper is the assessment of the healer's listening as an aspect of the history of caring and curing, with particular attention to its place in psychological healing. METHOD: An extensive range of philosophical, religious, and medical sources from antiquity to the present were studied. 
RESULTS: Over the centuries, listening has been a crucial aspect of the various endeavors undertaken by healers in the interest of acquiring information from, achieving understanding of, and bringing about healing effects for sufferers. Yet it has been vision rather than hearing that has been emphasized in knowing and understanding, and looking rather than listening that has been emphasized in healing endeavors. Only around the turn of the twentieth century did there emerge the focused study of care in listening, of listening beyond the words themselves, and of the significance of the interested listener as a soothing, empathic force. CONCLUSIONS: The place of listening in depth and with empathy is a crucial element in healing. While the emphasis on looking remains significant in the gathering and appraisal of data, at times it threatens to overwhelm the need for an attentive and concerned listener. There appears to be a natural tension between the two modes that has, in modern times, been translated into a tension between the two modes that has, in modern times, been translated into a tension between a scientific mode of gaining information and a humanistic mode of knowing sufferers. A healer neglects either one at his or her peril-and at the peril of his or her patients.}, author = {Jackson, S W}, institution = {Department of Psychiatry, Yale University School of Medicine, New Haven, CT 06510.}, journal = {The American Journal of Psychiatry}, number = {12}, pages = {1623--1632}, pmid = {1443239}, title = {{The listening healer in the history of psychological healing.}}, volume = {149}, year = {1992} } @article{Pantic2003, abstract = {The ability to recognize affective states of a person we are communicating with is the core of emotional intelligence. Emotional intelligence is a facet of human intelligence that has been argued to be indispensable and perhaps the most important for successful interpersonal social interaction. 
This paper argues that next-generation human-computer interaction (HCI) designs need to include the essence of emotional intelligence - the ability to recognize a user's affective states-in order to become more human-like, more effective, and more efficient. Affective arousal modulates all nonverbal communicative cues (facial expressions, body movements, and vocal and physiological reactions). In a face-to-face interaction, humans detect and interpret those interactive signals of their communicator with little or no effort. Yet design and development of an automated system that accomplishes these tasks is rather difficult. This paper surveys the past work in solving these problems by a computer and provides a set of recommendations for developing the first part of an intelligent multimodal HCI-an automatic personalized analyzer of a user's nonverbal affective feedback.}, author = {Pantic, Maja and Rothkrantz, L J M}, doi = {10.1109/JPROC.2003.817122}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pantic, Rothkrantz - 2003 - Toward an affect-sensitive multimodal human-computer interaction.pdf:pdf}, issn = {00189219}, journal = {Proceedings of the IEEE}, number = {9}, pages = {1370--1390}, publisher = {IEEE}, title = {{Toward an affect-sensitive multimodal human-computer interaction}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1230215}, volume = {91}, year = {2003} } @inproceedings{Mateas2003, abstract = {In this paper we discuss our research and development towards creating an architecture, and a story design using this architecture, that integrates a broad and shallow approach to natural language processing, a novel character authoring language and a novel drama manager, in order to build an interactive drama about human relationships.}, address = {San Jose, CA, UA}, author = {Mateas, Michael and Stern, Andrew}, booktitle = {Game Developers Conference Game Design track}, file = 
{:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mateas, Stern - 2003 - Fa\c{c}ade An experiment in building a fully-realized interactive drama.pdf:pdf}, publisher = {Citeseer}, title = {{Fa\c{c}ade: An experiment in building a fully-realized interactive drama}}, url = {http://www.mendeley.com/research/faade-an-experiment-in-building-a-fullyrealized-interactive-drama/}, volume = {2}, year = {2003} } @incollection{Hoffman1987, address = {Cambridge}, author = {Hoffman, Martin L.}, booktitle = {Empathy and its development}, editor = {Eisenberg, N. and Strayer, J.}, pages = {47--80}, publisher = {Cambridge University Press.}, title = {{The contribution of empathy to justice and moral judgment}}, year = {1987} } @article{Elliot2001, author = {Elliot, Andrew J. and McGregor, holly A.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Elliot, McGregor - 2001 - A 2 x 2 achievement goal framework.pdf.pdf:pdf}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {501--519}, title = {{A 2 x 2 achievement goal framework.pdf}}, volume = {80}, year = {2001} } @inproceedings{Ehrlich2000, abstract = {Designers of video-mediated communication and affective computing applications must make tradeoffs to deal with limited bandwidth. Typically spatial resolution and color are preserved at the expense of temporal resolution and accuracy. 
Our data suggest that this may not be the appropriate tradeoff for communicating facial affect; preserving motion is critical and may even compensate for major losses in image realism.}, address = {NY}, author = {Ehrlich, Sheryl M and Schiano, Diane J and Sheridan, Kyle}, booktitle = {Proceedings of ACM CHI 2000 Conference on Human Factors in Computing Systems}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ehrlich, Schiano, Sheridan - 2000 - Communicating Facial Affect It's Not the Realism, It's the Motion.pdf:pdf}, keywords = {Facial affect,face,facial expression of emotion,image degradation,nonverbal communication,video conferencing.}, pages = {252--253}, publisher = {ACM}, title = {{Communicating Facial Affect : It's Not the Realism, It's the Motion}}, year = {2000} } @book{Darwin1872, address = {London}, author = {Darwin, C.}, publisher = {Murray [Reprinted Chicago: University of Chicago Press, 1965]}, title = {{The Expression of Emotions in Man and Animals}}, year = {1872} } @article{McClave2000, abstract = {Speaker head movements pattern predictably and have semantic, discourse, and communicative functions. Some head movements convey propositional content, while others carry semantic meanings beyond affirmation and negation. Side-to-side shakes correlate with expressions of inclusivity and intensification. Lateral movements also co-occur with uncertain statements and lexical repairs. In narration, head movements serve to locate referents in abstract space. A change in head posture marks switches between direct and indirect discourse, and speaker head nods function as backchannel requests to which listeners are extraordinarily sensitive. 
These findings are based on the microanalysis of videotaped conversations between native speakers of American English.}, author = {McClave, Evelyn Z}, doi = {10.1016/S0378-2166(99)00079-X}, issn = {03782166}, journal = {Journal of Pragmatics}, keywords = {ameri,backchannel,gesture,head movements,kinesic,nonverbal,speech}, number = {7}, pages = {855--878}, publisher = {Elsevier}, title = {{Linguistic functions of head movements in the context of speech}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S037821669900079X}, volume = {32}, year = {2000} } @article{Gallese2003, abstract = {Starting from a neurobiological standpoint, I will propose that our capacity to understand others as intentional agents, far from being exclusively dependent upon mentalistic/linguistic abilities, be deeply grounded in the relational nature of our interactions with the world. According to this hypothesis, an implicit, prereflexive form of understanding of other individuals is based on the strong sense of identity binding us to them. We share with our conspecifics a multiplicity of states that include actions, sensations and emotions. A new conceptual tool able to capture the richness of the experiences we share with others will be introduced: the shared manifold of intersubjectivity. I will posit that it is through this shared manifold that it is possible for us to recognize other human beings as similar to us. It is just because of this shared manifold that intersubjective communication and ascription of intentionality become possible. It will be argued that the same neural structures that are involved in processing and controlling executed actions, felt sensations and emotions are also active when the same actions, sensations and emotions are to be detected in others. It therefore appears that a whole range of different "mirror matching mechanisms" may be present in our brain. 
This matching mechanism, constituted by mirror neurons originally discovered and described in the domain of action, could well be a basic organizational feature of our brain, enabling our rich and diversified intersubjective experiences. This perspective is in a position to offer a global approach to the understanding of the vulnerability to major psychoses such as schizophrenia.}, author = {Gallese, Vittorio}, journal = {Psychopathology}, number = {4}, pages = {171--180}, title = {{The roots of empathy: the shared manifold hypothesis and the neural basis of intersubjectivity}}, volume = {36}, year = {2003} } @article{Bradley2007, abstract = {This handbook will help to advance research in emotion by encouraging researchers to take greater advantage of standard and well-researched approaches, which will increase both theproductivity in the field and the speed and accuracy with which}, author = {Bradley, Margaret M and Lang, P J}, institution = {University of Florida, Center for Research in Psychophysiology, Gainesville, Fl, USA.}, journal = {Emotion}, pages = {29--46}, publisher = {University of Florida}, title = {{The International Affective Digitized Sounds Affective Ratings of Sounds and Instruction Manual}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:The+International+Affective+Digitized+Sounds+Affective+Ratings+of+Sounds+and+Instruction+Manual\#1}, year = {2007} } @inproceedings{Curtis2008, address = {Brussels, Belgium}, author = {Curtis, Dorothy and Shih, Eugene and Waterman, Jason and Guttag, John and Bailey, Jacob and Stair, Thomas and Greenes, Robert A and Ohno-machado, Lucila}, booktitle = {Proceedings of the ICST 3rd International Conference on Body Area Networks (BodyNets '08)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Curtis et al. 
- 2008 - Physiological Signal Monitoring in the Waiting Areas of an Emergency Room.pdf:pdf}, keywords = {physiological signal monitoring,sensor network}, publisher = {Institute for Computer Sciences, Social-Informatics and Telecommunications Engineering (ICST)}, title = {{Physiological Signal Monitoring in the Waiting Areas of an Emergency Room}}, year = {2008} } @article{Heimendinger2007, abstract = {The purpose of this article is to report the process outcomes of a coaching methodology used in a study designed to increase fruit and vegetable consumption and physical activity in families. Eighty-eight families with second graders were recruited from a rural, biethnic community in Colorado and randomized to intervention and delayed intervention conditions. This article reports on the 27 families in the delayed intervention group. Families received up to 10 home visits over 10 months from a family advisor and completed activities to improve their dietary and physical activity behaviors. Coaching conversations took place during each home visit. Coaching process outcomes were evaluated by analysis of visit documentation, participant survey, and qualitative interviews. Results indicated that coaching, in conjunction with family activities, engaged families in the process of change and facilitated movement toward the achievement of their weekly nutrition or physical activity goals. 
Coaching methodology may be particularly useful for participatory research.}, author = {Heimendinger, Jerianne and Uyeki, Terry and Andhara, Aurielle and Marshall, Julie A and Scarbro, Sharon and Belansky, Elaine and Crane, Lori}, institution = {Jerianneb@earthlink.net}, journal = {Health education behavior the official publication of the Society for Public Health Education}, keywords = {colorado,diet,exercise,fruit,health promotion,humans,interviews topic,professional family relations,vegetables}, number = {1}, pages = {71--89}, pmid = {16740515}, title = {{Coaching process outcomes of a family visit nutrition and physical activity intervention.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16740515}, volume = {34}, year = {2007} } @book{Miller1995a, author = {Miller, William R. and Tonigan, JS and Longabaugh, R}, booktitle = {Psychology}, editor = {Mattson, Margaret E}, pages = {98}, publisher = {National Institute on Alcohol Abuse and Alcoholism}, series = {Project MATCH Monograph Series}, title = {{The Drinker Inventory of Consequences (DrInC): An Instrument for Assessing Adverse Consequences of Alcohol Abuse}}, url = {http://scholar.google.com/scholar?q=The+Drinker+Inventory+of+Consequences+\%28DrInC\%29.+An+Instrument+for+Assessing+Adverse+Consequences+of+Alcohol+Abuse\&hl=en\&btnG=Search\&as\_sdt=2001\&as\_sdtp=on\#1}, volume = {4}, year = {1995} } @misc{Ekman1980, author = {Ekman, Paul and Freisen, Wallace V. and Ancoli, Sonia}, booktitle = {Journal of Personality and Social Psychology}, doi = {10.1037/h0077722}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ekman, Freisen, Ancoli - 1980 - Facial signs of emotional experience.pdf:pdf}, issn = {0022-3514}, number = {6}, pages = {1125--1134}, title = {{Facial signs of emotional experience.}}, url = {http://content.apa.org/journals/psp/39/6/1125}, volume = {39}, year = {1980} } @book{C.E.Osgood1975, author = {{C.E. Osgood} and May, W.H. 
and Miron, M.S.}, publisher = {University of Illinois Press}, title = {{Cross-Cultural Universals of Affective Meaning}}, year = {1975} } @article{Vanbaaren2004, author = {Van baaren, Rick B. and Holland, Rob W. and Kawakami, Kerry and Knippenberg, Ad Van}, doi = {10.1111/j.0963-7214.2004.01501012.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Van baaren et al. - 2004 - Mimicry and Prosocial Behavior.pdf:pdf}, issn = {0956-7976}, journal = {Psychological Science}, month = jan, number = {1}, pages = {71--74}, title = {{Mimicry and Prosocial Behavior}}, url = {http://pss.sagepub.com/lookup/doi/10.1111/j.0963-7214.2004.01501012.x}, volume = {15}, year = {2004} } @article{Watson1985, abstract = {Reanalyses of 7 studies of self-reported mood by researchers such as M. A. Lebo and J. R. Nesselroade (see record 1979-30118-001) and J. A. Russell and D. Ridgeway (see record 1984-03807-001) indicate that Positive Affect and Negative Affect consistently emerge as the 1st 2 varimax rotated dimensions in orthogonal factor analyses or as the 1st 2 2nd-order factors derived from oblique solutions. The 2 factors emerged with varying sets of descriptors and were even replicated in several data sets characterized by possible methodological problems (e.g., acquiescence response bias, inappropriate response formats) noted by earlier authors. The results thus attest to the stability and robustness of Positive and Negative Affect in self-report. 
Because this same 2-dimensional configuration has also been consistently identified in most other major lines of mood research, it is now firmly established as the basic structure of English-language affect at the general factor level.}, author = {Watson, D and Tellegen, A}, journal = {Psychological Bulletin}, number = {2}, pages = {219--235}, pmid = {3901060}, publisher = {bepress}, title = {{Toward a consensual structure of mood.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/3901060}, volume = {98}, year = {1985} } @inproceedings{Amini2013a, abstract = {In this paper, we discuss a novel approach for the computer-delivery of Brief Motivational Interventions (BMIs) for health behavior change. We describe the basic elements of our system architecture, and focus on enabling a multimodal Embodied Conversational Agent (ECA) to deliver the health behavior change interventions empathetically by adapting, in real- time, its verbal and non-verbal communication messages to those of its clients. The designed empathy model integrates a cognitive component and an affective components. We then discuss the evaluation experiment that we designed and conducted to evaluate the impact of empathy model on users’ experience with the empathic character. Results indicate that, in comparison with the non-empathic counselor, the empathic one is better accepted (e.g., more enjoyable, empathizing, engaging, and likable) and some users might be willing to disclose more private information (e.g., drinking habits) to the counselor endowed with empathic abilities than the one without.}, address = {Philadelphia, PA, USA}, author = {Amini, Reza and Lisetti, Christine and Yasavur, Ugan and Rishe, Naphtali}, booktitle = {IEEE International Conference on Healthcare Informatics 2013 (ICHI 2013)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Amini et al. 
- 2013 - On-Demand Virtual Health Counselor for Delivering Behavior-Change Health Interventions.pdf:pdf}, number = {1}, publisher = {IEEE}, title = {{On-Demand Virtual Health Counselor for Delivering Behavior-Change Health Interventions}}, year = {2013} } @article{Sloan2009, author = {Sloan, Robin James Stuart and Cook, Malcolm and Robinson, Brian}, doi = {10.1109/VIZ.2009.28}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sloan, Cook, Robinson - 2009 - Considerations for Believable Emotional Facial Expression Animation.pdf:pdf}, isbn = {978-0-7695-3734-4}, journal = {2009 Second International Conference in Visualisation}, keywords = {- character animation,believability,emotional expression,facial animation,inform artistic practice,of and between emotional,perception,produce and test animations,the primary goal of,the project is to}, month = jul, pages = {61--66}, publisher = {Ieee}, title = {{Considerations for Believable Emotional Facial Expression Animation}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5230711}, year = {2009} } @article{Meng2009, author = {Meng, Qinggang and Lee, Mark}, doi = {10.1109/CASE.2009.156}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Meng, Lee - 2009 - Empathy between Human and Home Service Robots.pdf:pdf}, isbn = {978-0-7695-3728-3}, journal = {2009 IITA International Conference on Control, Automation and Systems Engineering (case 2009)}, keywords = {-home service robots,human-robot interaction}, month = jul, pages = {220--224}, publisher = {Ieee}, title = {{Empathy between Human and Home Service Robots}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5194430}, year = {2009} } @article{Newman2002, abstract = {OBJECTIVES: This study assessed the differential effects of face-to-face interviewing and audio-computer assisted self-interviewing (audio-CASI) on categories of questions. 
METHODS: Syringe exchange program participants (n = 1417) completed face-to-face interviews or audio-CASI. The questionnaire was categorized into the groups "stigmatized behaviors," "neutral behaviors," and "psychological distress." Interview modes were compared for questions from each category. RESULTS: Audio-CASI elicited more frequent reporting of "stigmatized behaviors" than face-to-face interviews. Face-to-face interviewing elicited more frequent reporting of "psychological distress" than audio-CASI. CONCLUSIONS: Responding to potentially sensitive questions should not be seen as merely "providing data," but rather as an activity with complex motivations. These motivations can include maintaining social respect, obtaining social support, and altruism. Ideally, procedures for collecting self-report data would maximize altruistic motivation while accommodating the other motives.}, author = {Newman, Jessica Clark and {Des Jarlais}, Don C and Turner, Charles F and Gribble, Jay and Cooley, Phillip and Paone, Denise}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Newman et al. 
- 2002 - The differential effects of face-to-face and computer interview modes.pdf:pdf}, issn = {0090-0036}, journal = {American journal of public health}, keywords = {Adult,Analysis of Variance,Computers,Female,HIV Infections,HIV Infections: prevention \& control,Humans,Interviews as Topic,Interviews as Topic: methods,Intravenous,Male,Needle-Exchange Programs,Needle-Exchange Programs: utilization,Odds Ratio,Risk-Taking,Self Disclosure,Substance Abuse,Tape Recording,United States}, month = mar, number = {2}, pages = {294--7}, pmid = {11818309}, title = {{The differential effects of face-to-face and computer interview modes.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1447060\&tool=pmcentrez\&rendertype=abstract}, volume = {92}, year = {2002} } @inproceedings{Johnson2004, abstract = {Embodied conversational agents (ECA) have potential as facilitators for health interventions. However, their utility is limited as long as people must sit down in front of a computer to access them. This paper describes a project that is deploying an ECA on a handheld computer, and using it to assist in a psychosocial intervention aimed at providing training in problem solving skills. The agent is based upon the virtual trainer/counselor in the pedagogical drama Carmen’s Bright IDEAS, adapted for handheld use and for interaction with a human caregiver. The system will go into clinical trails in August of 2004. The paper discusses the design and technical issues involved in the transition from laptop computer to handheld device and from 3rd-person view to first-person interaction, and the plan for evaluation. 
The clinical trial is designed both to evaluate psychosocial outcomes and to assess user preferences in ECA interaction modalities over the course of multiple sessions of use.}, author = {Johnson, WL and LaBore, C}, booktitle = {AAAI Fall Symposium on Dialogue Systems for Health Communication}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Johnson, LaBore - 2004 - A pedagogical agent for psychosocial intervention on a handheld computer.pdf:pdf}, pages = {22--24}, title = {{A pedagogical agent for psychosocial intervention on a handheld computer}}, year = {2004} } @article{Suchman1997, abstract = {To formulate an empirically derived model of empathic communication in medical interviews by describing the specific behaviors and patterns of interaction associated with verbal expressions of emotion.}, author = {Suchman, A L and Markakis, K and Beckman, H B and Frankel, R}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Suchman et al. 
- 1997 - A model of empathic communication in the medical interview.pdf:pdf}, issn = {0098-7484}, journal = {JAMA : the journal of the American Medical Association}, keywords = {Communication,Empathy,Humans,Interviews as Topic,Models,Physician-Patient Relations,Psychological}, month = feb, number = {8}, pages = {678--82}, pmid = {9039890}, title = {{A model of empathic communication in the medical interview.}}, volume = {277}, year = {1997} } @article{Niewiadomski2008, author = {Niewiadomski, Radoslaw and Ochs, Magalie}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Niewiadomski, Ochs - 2008 - Expressions of empathy in ECAs.pdf:pdf}, journal = {Intelligent Virtual Agents}, keywords = {eca,empathy,facial expressions}, pages = {37--44}, title = {{Expressions of empathy in ECAs}}, url = {http://www.springerlink.com/index/618982507263X720.pdf}, year = {2008} } @article{Beckman1984, abstract = {Determining the patient's major reasons for seeking care is of critical importance in a successful medical encounter. To study the physician's role in soliciting and developing the patient's concerns at the outset of a clinical encounter, 74 office visits were recorded. In only 17 (23\%) of the visits was the patient provided the opportunity to complete his or her opening statement of concerns. In 51 (69\%) of the visits the physician interrupted the patient's statement and directed questions toward a specific concern; in only 1 of these 51 visits was the patient afforded the opportunity to complete the opening statement. In six (8\%) return visits, no solicitation whatever was made. Physicians play an active role in regulating the quantity of information elicited at the beginning of the clinical encounter, and use closed-ended questioning to control the discourse. 
The consequence of this controlled style is the premature interruption of patients, resulting in the potential loss of relevant information.}, author = {Beckman, H B and Frankel, R M}, issn = {00034819}, journal = {Annals of internal medicine}, number = {5}, pages = {692--696}, title = {{The effect of physician behavior on the collection of data}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6486600}, volume = {101}, year = {1984} } @article{Noar2007, abstract = {Although there is a large and growing literature on tailored print health behavior change interventions, it is currently not known if or to what extent tailoring works. The current study provides a meta-analytic review of this literature, with a primary focus on the effects of tailoring. A comprehensive search strategy yielded 57 studies that met inclusion criteria. Those studies-which contained a cumulative N = 58,454-were subsequently meta-analyzed. The sample size-weighted mean effect size of the effects of tailoring on health behavior change was found to be r = .074. Variables that were found to significantly moderate the effect included (a) type of comparison condition, (b) health behavior, (c) type of participant population (both type of recruitment and country of sample), (d) type of print material, (e) number of intervention contacts, (f) length of follow-up, (g) number and type of theoretical concepts tailored on, and (h) whether demographics and/or behavior were tailored on. Implications of these results are discussed and future directions for research on tailored health messages and interventions are offered.}, author = {Noar, Seth M and Benac, Christina N and Harris, Melissa S}, institution = {Department of Communication, University of Kentucky, Lexington, KY 40506-0042, USA. 
snoar2@uky.edu}, journal = {Psychological Bulletin}, keywords = {adolescent,adult,aged,child,communication,female,health behavior,health education,health education methods,health promotion,health promotion methods,humans,male,middle aged,patient acceptance health care,patient acceptance health care psychology,teaching materials,united states}, number = {4}, pages = {673--693}, pmid = {17592961}, publisher = {American Psychological Association}, title = {{Does tailoring matter? Meta-analytic review of tailored print health behavior change interventions.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17592961}, volume = {133}, year = {2007} } @misc{Ostermann1998, abstract = {MPEG-4 is the first international standard that standardizes true multimedia communication-including natural and synthetic audio, natural and synthetic video, as well as 3D graphics. Integrated into this standard is the capability to define and animate virtual humans consisting of synthetic heads and bodies. For the head, more than 70 model-independent animation parameters defining low-level actions like \&ldquo;move left mouth corner\&rdquo; up to high-level parameters like facial expressions and visemes are standardized In a communication application. The encoder can define the face model using MPEG-4 BIFS (BInary Format for Scenes) and transmit it to the decoder. Alternatively, the encoder can rely on a face model that is available at the decoder. The animation parameters are quantized, predictively encoded using an arithmetic encoder or a DCT. The decoder receives the model and the animation parameters in order to animate the model. 
Since MPEG-4 defines the minimum MPEG-4 terminal capabilities in profiles and levels, the encoder knows the quality of the animation at the decoder}, author = {Ostermann, J\"{o}rn}, booktitle = {Proceedings Computer Animation 98 Cat No98EX169}, doi = {10.1109/CA.1998.681907}, isbn = {0818685417}, issn = {10874844}, keywords = {facial animation,fap,mpeg4,synthetic faces}, pages = {49--51}, title = {{Animation of synthetic faces in MPEG-4}}, year = {1998} } @article{Benamara2007, abstract = {To date, there is almost no work on the use of adverbs in sentiment analysis, nor has there been any work on the use of adverb-adjective combinations (AACs). We propose an AAC-based sentiment analysis technique that uses a linguistic analysis of adverbs of degree. We define a set of general axioms (based on a classification of adverbs of degree into five categories) that all adverb scoring techniques must satisfy. Instead of aggregating scores of both adverbs and adjectives using simple scoring functions, we propose an axiomatic treatment of AACs based on the linguistic classification of adverbs. Three specific AAC scoring methods that satisfy the axioms are presented. We describe the results of experiments on an annotated set of 200 news articles (annotated by 10 students) and compare our algorithms with some existing sentiment analysis algorithms. We show that our results lead to higher accuracy based on Pearson correlation with human subjects.}, author = {Benamara, Farah and Irit, Sabatier and Cesarano, Carmine and Federico, Napoli and Reforgiato, Diego}, journal = {In Proc of Int Conf on Weblogs and Social Media}, keywords = {adverb adjective combina,adverbs degree,sentiment analysis}, pages = {1--4}, title = {{Sentiment Analysis : Adjectives and Adverbs are better than Adjectives Alone}}, url = {http://www.icwsm.org/papers/3--Benamara-Cesarano-Picariello-Reforgiato-Subrahmanian.pdf}, year = {2007} } @article{Hatfield2009, author = {Hatfield, Elaine and Rapson, Richard L. 
and Le, Yen-Chi L.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hatfield, Rapson, Le - 2009 - Emotional Contagion and Empathy.pdf:pdf}, journal = {The social neuroscience of empathy}, pages = {1--20}, title = {{Emotional Contagion and Empathy}}, url = {http://books.google.com/books?hl=en\&lr=\&id=KLvJKTN\_nDoC\&oi=fnd\&pg=PA19\&dq=Emotional+Contagion+and+Empathy\&ots=gC929Xij3X\&sig=IFpRxpx1igOlZl86Jr837oVgfhY}, year = {2009} } @article{Grynberg2010, abstract = {Alexithymia and empathy have been related but very little is known on shared variance between their respective affective and cognitive dimensions. We examined this question with correlations, as well as both exploratory and confirmatory analyses, and controlled for anxiety and depression. The responses of 645 young adults to self-report questionnaires of alexithymia (TAS-20), empathy (IRI), anxiety (STAI-T) and depression (BDI-13) were examined. We observed associations between the proposed cog- nitive components of alexithymia (externally-oriented thinking) and that of empathy (perspective taking, fantasy) as well as empathic concern, which were insensitive to anxiety or depression. In contrast, asso- ciations between the proposed affective components of alexithymia (difficulty identifying feelings, diffi- culty describing feelings) and empathy (personal distress) were largely due to shared covariance with anxiety. A model encompassing an affective and a cognitive (including empathic concern) latent factors emerged, even after controlling for dysphoric affects. These findings suggest specific associations between cognitive and affective components of both constructs that were dissimilarly affected by anxiety and depression. 
The allocation of empathic concern to the cognitive factor is also discussed.}, author = {Grynberg, Delphine and Luminet, Olivier and Corneille, Olivier and Gr\`{e}zes, Julie and Berthoz, Sylvie}, doi = {10.1016/j.paid.2010.07.013}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Grynberg et al. - 2010 - Alexithymia in the interpersonal domain A general deficit of empathy.pdf:pdf}, issn = {01918869}, journal = {Personality and Individual Differences}, keywords = {Alexithymia,Anxiety,Depression,Empathy,IRI,TAS-20}, mendeley-tags = {Alexithymia,Anxiety,Depression,Empathy,IRI,TAS-20}, month = dec, number = {8}, pages = {845--850}, publisher = {Elsevier Ltd}, title = {{Alexithymia in the interpersonal domain: A general deficit of empathy?}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S019188691000365X}, volume = {49}, year = {2010} } @book{Widmark1981, address = {Davis, California}, author = {Widmark, Erik Matteo Prochet}, isbn = {0931890071, 9780931890079}, pages = {163}, publisher = {Biomedical Publications}, title = {{Principles and Applications of Medicolegal Alcohol Determination}}, year = {1981} } @book{Andreassi2009, author = {Andreassi, John L.}, edition = {5}, isbn = {978-0805828337}, pages = {488}, publisher = {Taylor \& Francis}, title = {{Psychophysiology: Human Behavior and Physiological Response}}, year = {2009} } @article{Stoiber2010, abstract = {A new animation system produces realistic expressive facial motion at interactive speed. It employs motion models that control facial-expression dynamics and retain the expressions' temporal signature learned from motion capture data. A nondeterministic component ensures the variety of the long-term visual behavior. 
This system can efficiently animate any synthetic face.}, author = {Stoiber, Nicolas and Breton, Gaspard and S\'{e}guier, Renaud}, doi = {10.1109/MCG.2010.40}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Stoiber, Breton, S\'{e}guier - 2010 - Modeling Short-Term Dynamics and Variability for Realistic Interactive Facial Animation.pdf:pdf}, journal = {Computer Graphics and Applications}, number = {4}, pages = {51--61}, title = {{Modeling Short-Term Dynamics and Variability for Realistic Interactive Facial Animation}}, volume = {30}, year = {2010} } @inproceedings{Huang2010a, abstract = {Backchannel feedback is an important kind of nonverbal feedback within face-to-face interaction that signals a person's interest, attention and willingness to keep listening. Learning to predict when to give such feedback is one of the keys to creating natural and realistic virtual humans. Prediction models are traditionally learned from large corpora of annotated face-to-face interactions, but this approach has several limitations. Previously, we proposed a novel data collection method, Parasocial Consensus Sampling, which addresses these limitations. In this paper, we show that data collected in this manner can produce effective learned models. 
A subjective evaluation shows that the virtual human driven by the resulting probabilistic model significantly outperforms a previously published rule-based agent in terms of rapport, perceived accuracy and naturalness, and it is even better than the virtual human driven by real listeners' behavior in some cases.}, author = {Huang, Lixing and Morency, Louis-philippe and Gratch, Jonathan}, booktitle = {Proceedings of the 10th international Conference on Intelligent Virtual Agents (IVA'10)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Huang, Morency, Gratch - 2010 - Learning Backchannel Prediction Model from Parasocial Consensus Sampling A Subjective Evaluation.pdf:pdf}, keywords = {backchannel prediction,parasocial interaction,virtual human}, pages = {159--172}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Learning Backchannel Prediction Model from Parasocial Consensus Sampling : A Subjective Evaluation}}, year = {2010} } @incollection{Hussain2015, author = {Hussain, M. Sazzad and D'Mello, Sidney K. and Calvo, Rafael A.}, booktitle = {The Oxford Handbook of Affective Computing}, chapter = {25}, edition = {1}, isbn = {978-0-19-994223-7}, pages = {349--357}, publisher = {Oxford University Press}, title = {{Research and Development Tools in Affective Computing}}, year = {2015} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Nischt2006, abstract = {MPML3D is our first candidate of the next generation of authoring languages aimed at supporting digital content creators in providing highly appealing and highly interactive content with little effort. The language is based on our previously developed family of Multimodal Presentation Markup Languages (MPML) that broadly followed the \^{a}? sequential\^{a}? and \^{a}? parallel\^{a}? tagging structure scheme for generating presynchronized presentations featuring life-like characters and interactions with the user. The new markup language MPML3D deviates from this design framework and proposes a reactive model instead, which is apt to handle interaction-rich scenarios with highly realistic 3D characters. Interaction in previous versions of MPML could be handled only at the cost of considerable scripting effort due to branching. By contrast, MPML3D advocates a reactive model that allows perceptions of other characters or the user interfere with the presentation flow at any time, and thus facilitates natural and unrestricted interaction. 
MPML3D is designed as a powerful and flexible language that is easy-to-use by non-experts, but it is also extensible as it allows content creators to add functionality such as a narrative model by using popular scripting languages.}, author = {Nischt, Michael and Prendinger, Helmut and Andr\'{e}, Elisabeth and Ishizuka, Mitsuru}, isbn = {9101007118}, journal = {Lecture Notes in Computer Science}, number = {1}, pages = {218--229}, publisher = {Springer}, title = {{MPML3D: a reactive framework for the Multimodal Presentation Markup Language}}, url = {http://www.springerlink.com/index/j7217706p7658021.pdf}, volume = {62}, year = {2006} } @incollection{Dautenhahn2002, author = {Dautenhahn, Kerstin and Bond, Alan and Ca\~{n}amero, Lola and Edmonds, Bruce}, booktitle = {Socially Intelligent Agents: Creating Relationships with Computers and Robots}, chapter = {1}, editor = {Dautenhahn, Kerstin and Bond, Alan and Ca\~{n}amero, Lola and Edmonds, Bruce}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dautenhahn et al. - 2002 - Creating Relationships with Computers and Robots.pdf:pdf}, isbn = {978-1-4020-7057-0}, pages = {1--20}, publisher = {Springer}, title = {{Creating Relationships with Computers and Robots}}, url = {http://www.springerlink.com/index/V38H434X220766G8.pdf}, year = {2002} } @inproceedings{Dahlberg2008, abstract = {Research experiences for undergraduates are considered an effective means for increasing student retention and encouraging undergraduate students to continue on to graduate school. However, managing a cohort of undergraduate researchers, with varying skill levels, can be daunting for faculty advisors. We have developed a program to engage students in research and outreach in visualization, virtual reality, networked robotics, and interactive games. 
Our program immerses students into the life of a lab, employing a situated learning approach that includes tiered mentoring and collaboration to enable students at all levels to contribute to research. Students work in research comprised of other undergraduates, graduate students and faculty, and participate in professional development and social gatherings within the larger cohort. Results from our first two years indicate this approach is manageable and effective for increasing students’ ability and desire to conduct research.}, address = {New York, New York, USA}, author = {Dahlberg, Teresa and Barnes, Tiffany and Rorrer, Audrey and Powell, Eve and Cairco, Lauren}, booktitle = {Proceedings of the 39th SIGCSE technical symposium on Computer science education - SIGCSE '08}, doi = {10.1145/1352135.1352293}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dahlberg et al. - 2008 - Improving retention and graduate recruitment through immersive research experiences for undergraduates.pdf:pdf}, isbn = {9781595937995}, keywords = {education,undergraduate research}, pages = {466}, publisher = {ACM Press}, title = {{Improving retention and graduate recruitment through immersive research experiences for undergraduates}}, url = {http://portal.acm.org/citation.cfm?doid=1352135.1352293}, year = {2008} } @article{Brockmyer2009, author = {Brockmyer, Jeanne H. and Fox, Christine M. and Curtiss, Kathleen a. and McBroom, Evan and Burkhart, Kimberly M. and Pidruzny, Jacquelyn N.}, doi = {10.1016/j.jesp.2009.02.016}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Brockmyer et al. 
- 2009 - The development of the Game Engagement Questionnaire A measure of engagement in video game-playing.pdf:pdf}, issn = {00221031}, journal = {Journal of Experimental Social Psychology}, keywords = {Measurement Rasch,Psychological absorption,Psychological engagement Immersion Presence Flow,Video games Violence}, month = jul, number = {4}, pages = {624--634}, publisher = {Elsevier Inc.}, title = {{The development of the Game Engagement Questionnaire: A measure of engagement in video game-playing}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0022103109000444}, volume = {45}, year = {2009} } @incollection{Marsella2010, address = {Oxford}, author = {Marsella, Stacy and Gratch, Jonathan and Petta, Paolo}, booktitle = {A blueprint for an affectively competent agent: Cross-fertilization between Emotion Psychology, Affective Neuroscience, and Affective Computing}, chapter = {1.2}, editor = {Scherer, K.R. and B\"{a}nziger, T. and Roesch, E.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Marsella, Gratch, Petta - 2010 - Computational models of emotion.pdf:pdf}, pages = {21--41}, publisher = {Oxford University Press}, title = {{Computational models of emotion}}, url = {http://books.google.com/books?hl=en\&lr=\&id=C2gLOQ105okC\&oi=fnd\&pg=PA21\&dq=Computational+Models+of+Emotion\&ots=-KhMV4apCZ\&sig=zkBUHtiFB4V-MrzE7g494GGGKHY}, year = {2010} } @incollection{Bavelas1987, address = {Cambridge, UK}, author = {Bavelas, Janet Beavin and Black, Alex and Lemery, Charles R. and Mullett, Jennifer}, booktitle = {Empathy and its development}, editor = {Eisenberg, Nancy and Strayer, Janet}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bavelas et al. - 1987 - Empathy and its developement.pdf:pdf}, pages = {317--338}, publisher = {Cambridge University Press}, title = {{Motor mimicry as primitive empathy}}, year = {1987} } @inproceedings{Schipor2011, author = {Schipor, O.A.
and Pentiuc, S.G. and Schipor, M.D.}, booktitle = {Speech Technology and Human-Computer Dialogue (SpeD), 2011 6th Conference on}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Schipor, Pentiuc, Schipor - 2011 - Towards a multimodal emotion recognition framework to be integrated in a Computer Based Speech Therap.pdf:pdf}, isbn = {9781457704413}, keywords = {-computer assisted,multimodal interfaces,recognition}, pages = {1--6}, publisher = {IEEE}, title = {{Towards a multimodal emotion recognition framework to be integrated in a Computer Based Speech Therapy System}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5940727}, year = {2011} } @inproceedings{Steunebrink2009, author = {Steunebrink, B.R. and Dastani, Mehdi and Meyer, J.J.C.}, booktitle = {Proceedings of the 4th Workshop on Emotion and Computing}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Steunebrink, Dastani, Meyer - 2009 - The OCC model revisited.pdf:pdf}, title = {{The OCC model revisited}}, url = {http://www.idsia.ch/~steunebrink/Publications/KI09\_OCC\_revisited.pdf}, year = {2009} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @phdthesis{Becker-Asano2008, author = {Becker-Asano, Christian}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Becker-Asano - 2008 - WASABI Affect simulation for agents with believable interactivity.pdf:pdf}, keywords = {Emotion,Empathy,PhD Thesis,Secondary Emotions,primary Emotions}, mendeley-tags = {PhD Thesis}, pages = {186}, publisher = {IOS Press}, school = {University of Bielefeld}, title = {{WASABI: Affect simulation for agents with believable interactivity}}, type = {PhD Dissertation, IOS Press (DISKI 319)}, url = {http://books.google.com/books?hl=en\&lr=\&id=8ABvlwHBCQIC\&oi=fnd\&pg=PA1\&dq=WASABI+:+Affect+Simulation+for+Agents+with+Believable+Interactivity\&ots=m6MhCZ6IzD\&sig=IcDYrCYofbGlJ8E1szs\_wltd18k}, volume = {319}, year = {2008} } @article{Novielli2010, abstract = {We describe how the interaction mode with an embodied conversational agent (ECA) affects the users’ perception of the agent and their behavior during interaction, and propose a method to recognize the social attitude of users towards the agent from their verbal behavior. A corpus of human–ECA dialogues was collected with a Wizard-of-Oz study in which the input mode of the user moves was varied (written vs. speech-based). 
After labeling the corpus, we evaluated the relationship between input mode and social attitude of users towards the agent. The results show that, by increasing naturalness of interaction, spoken input produces a warmer attitude of users and a richer language: this effect is more evident for users with a background in humanities. Recognition of signs of social attitude is needed for adapting the ECA’s verbal and nonverbal behavior.}, author = {Novielli, Nicole and de Rosis, Fiorella and Mazzotta, Irene}, doi = {10.1016/j.pragma.2009.12.016}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Novielli, de Rosis, Mazzotta - 2010 - User attitude towards an embodied conversational agent Effects of the interaction mode.pdf:pdf}, issn = {03782166}, journal = {Journal of Pragmatics}, keywords = {evaluation of artificial agents,natural language user interfaces,user-centered design}, month = sep, number = {9}, pages = {2385--2397}, publisher = {Elsevier B.V.}, title = {{User attitude towards an embodied conversational agent: Effects of the interaction mode}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0378216609003324}, volume = {42}, year = {2010} } @inproceedings{Bransky2011, abstract = {To understand the role that memory plays we have collected data from three online experimental sessions in which participants inter- act with our virtual real-estate agent in both a recall and forget mode. 
We found that partial forgetting and even total loss of recall of an item, whether domain or social-based, was more believable and less frustrating than incorrect recall.}, author = {Bransky, Karla and Richards, Debbie}, booktitle = {Intelligent Virtual Agents 10th International Conference (IVA 2011)}, doi = {10.1007/978-3-642-23974-8\_49}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bransky, Richards - 2011 - Users ’ s Expectations of IVA Recall and Forgetting.pdf:pdf}, keywords = {forget-,intelligent virtual agents,memory,remembering}, pages = {433--434}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Users' Expectations of IVA Recall and Forgetting}}, year = {2011} } Duplicate entry TheMendeleySupportTeam2011 removed here; an identical copy of this entry appears earlier in this file. @incollection{Ekman1979, author = {Ekman, Paul}, booktitle = {Human Ethology}, chapter = {3}, editor = {Cranach, M. Von and Foppa, K. and Lepenies, W.
and Ploog, D.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ekman - 1979 - About Brows Emotional And Conversational Signals.pdf:pdf}, pages = {169--249}, publisher = {Cambridge University Press}, title = {{About Brows: Emotional And Conversational Signals}}, year = {1979} } @incollection{Chung2007, abstract = {The present paper focuses on the influence of avatar creation in a video game. More specifically, this study investigates the effects of avatar creation on attitude towards avatar, empathy, presence, and para-social interaction of female non-game users. As a cyber-self, an avatar is a graphic character representing a user in cyberspace. Avatars are primarily used in the entertainment industry as high-tech novelties, controlled by game users, for high-end video games. Some games provide game characters by default that users cannot change, but other games provide various options gamers can choose. What if game users can create their own avatars? Do they have more psychological closeness with their avatars as their cyber-selves? This study tested the differences of attitude, empathy, presence, and para-social interaction of female non-game users between an avatar creation group and a non-avatar creation group and resulted in no difference.}, author = {Chung, Donghun and DeBuys, Brahm and Nam, Chang}, booktitle = {Human-Computer Interaction. 
Interaction Design and Usability}, doi = {10.1007/978-3-540-73105-4\_78}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chung, DeBuys, Nam - 2007 - Influence of avatar creation on attitude, empathy, presence, and para-social interaction.pdf:pdf}, isbn = {978-3-540-73104-7}, keywords = {Avatar - Attitude - Empathy - Presence - Para-Soci}, pages = {711--720}, publisher = {Springer Berlin / Heidelberg}, title = {{Influence of avatar creation on attitude, empathy, presence, and para-social interaction}}, url = {http://www.springerlink.com/index/9518116J51670433.pdf}, year = {2007} } @inproceedings{Yasavur2012, abstract = {In this paper, we have proposed a user model for com- puter based drinking behavior change intervention and rec- ommender systems. We discuss speci c requirements of user modeling in health promotion and speci cally alco- hol interventions. We believe that making behavior change systems available pervasively may lead to better and sus- tainable results. Therefore, our proposed user model takes advantage of the target-behavior related features such as contextual features (e.g., social interactions, location, and time). The proposed user model uses well-validated ques- tionnaires to capture target-behavior speci c aspects. 
We also introduced approaches for enhancing users' experience in the model creation stage by using Embodied Conversational Agents (ECAs) and users' affective states.}, address = {Dublin, Ireland}, author = {Yasavur, Ugan and Amini, Reza and Lisetti, Christine L}, booktitle = {First International Workshop on Recommendation Technologies for Lifestyle Change 2012 (LIFESTYLE 2012)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yasavur, Amini, Lisetti - 2012 - User Modeling for Pervasive Alcohol Intervention Systems.pdf:pdf}, keywords = {User modeling,alcohol intervention,behavior change,lifestyle change recommender systems (LSCRS).,tailoring}, pages = {29--34}, title = {{User Modeling for Pervasive Alcohol Intervention Systems}}, url = {http://ceur-ws.org/Vol-891/LIFESTYLE\_INTERFACERS\_2012\_proceedings.pdf\#page=29}, year = {2012} } @article{Mantri2012, author = {Mantri, MS and Pawar, MS and Javale, D}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mantri, Pawar, Javale - 2012 - Survey on Multi-domain Physiological Activity Recognition System.pdf:pdf}, journal = {International Journal of Engineering Research and Applications (IJERA)}, keywords = {- electrocardiogram,cepstral domain system,domain system,multi-domain,physiological activity recognition system,time}, number = {2}, pages = {551--554}, title = {{Survey on Multi-domain Physiological Activity Recognition System}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:Survey+on+Multi-domain+Physiological+Activity+Recognition+System\#0}, volume = {2}, year = {2012} } @mastersthesis{Floyd2009, author = {Floyd, David Richard}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Floyd - 2009 - RIGGING FOR FACIAL ANIMATION BASED ON THE FACIAL ACTION CODING SYSTEM RIGGING FOR FACIAL ANIMATION BASED ON THE.pdf:pdf}, pages = {1--38}, school = {University of
Georgia}, title = {Rigging for Facial Animation Based on the {Facial Action Coding System}}, type = {Master's Thesis}, year = {2009} } @book{Goldstein1985, address = {Hillsdale, NJ}, author = {Goldstein, Arnold P. and Michaels, Gerald Y.}, edition = {1}, isbn = {089859538X}, pages = {304}, publisher = {L. Erlbaum Associates}, title = {{Empathy: development, training, and consequences}}, year = {1985} } @inproceedings{Boukricha2011c, abstract = {Empathy is believed to play a prominent role in contributing to an efficient and satisfying cooperative social interaction by adjusting one's own behavior to that of others. Thus, endowing virtual humans with the ability to empathize not only enhances their cooperative social skills, but also makes them more likeable, trustworthy, and caring. Supported by psychological models of empathy, we propose an approach to model empathy for EMMA - an Empathic MultiModal Agent - based on three processing steps: First, the Empathy Mechanism consists of an internal simulation of perceived emotional facial expressions and results in an internal emotional feedback that represents the empathic emotion. Second, the Empathy Modulation consists of modulating the empathic emotion through different predefined modulation factors. Third, the Expression of Empathy consists of triggering EMMA's multiple modalities like facial and verbal behaviors.
In a conversational agent scenario involving the virtual humans MAX and EMMA, we illustrate our proposed model of empathy and we introduce a planned empirical evaluation of EMMA's empathic behavior.}, address = {Paris, France}, author = {Boukricha, Hana and Wachsmuth, Ipke}, booktitle = {Proceedings of the IEEE SSCI2011 - Symposium Series on Computational Intelligence, Workshop on Affective Computational Intelligence (WACI)}, doi = {10.1109/WACI.2011.5953146}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Boukricha, Wachsmuth - 2011 - Mechanism, modulation, and expression of empathy in a virtual human.pdf:pdf}, isbn = {9781612840840}, pages = {30--37}, publisher = {IEEE}, title = {{Mechanism, modulation, and expression of empathy in a virtual human}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5953146}, year = {2011} } @article{Beaupre2005, author = {Beaupre, M. G. and Hess, U.}, doi = {10.1177/0022022104273656}, journal = {Journal of Cross-Cultural Psychology}, pages = {355--370}, title = {{Cross-cultural emotion recognition among Canadian ethnic groups}}, volume = {36}, year = {2005} } @article{Jennett2008, author = {Jennett, Charlene and Cox, Anna L. and Cairns, Paul and Dhoparee, Samira and Epps, Andrew and Tijs, Tim and Walton, Alison}, doi = {10.1016/j.ijhcs.2008.04.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jennett et al. - 2008 - Measuring and defining the experience of immersion in games.pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, month = sep, number = {9}, pages = {641--661}, title = {{Measuring and defining the experience of immersion in games}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1071581908000499}, volume = {66}, year = {2008} } @book{Russell2010, author = {Russell, S.J.
and Norvig, P.}, booktitle = {Artificial Intelligence}, edition = {3}, editor = {Russell, Stuart and Norvig, Peter}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Russell, Norvig - 2010 - Artificial intelligence a modern approach.pdf:pdf}, isbn = {9780136042594}, publisher = {Prentice hall}, title = {{Artificial intelligence: a modern approach}}, url = {http://www.just.edu.jo/CoursesAndLabs/ARTIFICAL INTELLIGENCE\_CS362/Syllabus\_362.doc}, year = {2010} } @article{Cichosz2007, author = {Cichosz, Jarosław}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cichosz - 2007 - Emotion recognition in speech signal using emotion-extracting binary decision trees.pdf:pdf}, journal = {Doctoral Consortium. ACII}, keywords = {emotion recognition,speech analysis}, pages = {1--8}, title = {{Emotion recognition in speech signal using emotion-extracting binary decision trees}}, url = {http://www.di.uniba.it/intint/DC-ACII07/Chicosz.pdf}, year = {2007} } @article{Barkham2001, abstract = {To complement the evidence-based practice paradigm, the authors argued for a core outcome measure to provide practice-based evidence for the psychological therapies. Utility requires instruments that are acceptable scientifically, as well as to service users, and a coordinated implementation of the measure at a national level. The development of the Clinical Outcomes in Routine Evaluation-Outcome Measure (CORE-OM) is summarized. Data are presented across 39 secondary-care services (n = 2,710) and within an intensively evaluated single service (n = 1,455). Results suggest that the CORE-OM is a valid and reliable measure for multiple settings and is acceptable to users and clinicians as well as policy makers. Baseline data levels of patient presenting problem severity, including risk, are reported in addition to outcome benchmarks that use the concept of reliable and clinically significant change. 
Basic quality improvement in outcomes for a single service is considered.}, author = {Barkham, M and Margison, F and Leach, C and Lucock, M and Mellor-Clark, J and Evans, C and Benson, L and Connell, J and Audin, K and McGrath, G}, issn = {0022006X}, journal = {Journal of Consulting and Clinical Psychology}, number = {2}, pages = {184--196}, title = {{Service profiling and outcomes benchmarking using the CORE-OM: toward practice-based evidence in the psychological therapies. Clinical Outcomes in Routine Evaluation-Outcome Measures}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11393596?ordinalpos=17\&itool=EntrezSystem2.PEntrez.Pubmed.Pubmed\_ResultsPanel.Pubmed\_RVDocSum}, volume = {69}, year = {2001} } @inproceedings{Campbell2005, abstract = {Building relationships is a central concern for professionals (e.g., physicians, engineers, sales representatives, managers, etc.) because relationships promote a client's trust and loyalty. Rapport is a concept used to describe relationship quality and has two facets: enjoyable interactions and personal connection. Prior research has described the communication strategies of leaders for building better relationships with their subordinates and sales representatives with their customers by borrowing concepts from rapport management in sociolinguistics. The goal of this paper is to extend that work by demonstrating how rapport management applies to interaction between physicians and patients.
The rapport management model helps us explain how professionals succeed or fail to build relationships with clients based on their verbal communication behavior.}, address = {Limerick, Ireland}, author = {Campbell, K.S.}, booktitle = {Proceedings in International Professional Communication Conference IPCC2005}, doi = {10.1109/IPCC.2005.1494206}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Campbell - 2005 - The rapport management model how physicians build relationships with patients.pdf:pdf}, isbn = {0-7803-9027-X}, keywords = {communication,face-to-face interaction,health,medical interviews,sociolinguistics,verbal communication}, pages = {422--432}, publisher = {IEEE}, title = {{The rapport management model: how physicians build relationships with patients}}, url = {http://ieeexplore.ieee.org/xpl/freeabs\_all.jsp?arnumber=1494206}, year = {2005} } @article{Liu2010, abstract = {Emotions accompany everyone in the daily life, playing a key role in non-verbal communication, and they are essential to the understanding of human behavior. Emotion recognition could be done from the text, speech, facial expression or gesture. In this paper, we concentrate on recognition of “inner” emotions from electroencephalogram (EEG) signals as humans could control their facial expressions or vocal intonation. The need and importance of the automatic emotion recognition from EEG signals has grown with increasing role of brain computer interface applications and development of new forms of human-centric and humandriven interaction with digital media. We propose fractal dimension based algorithm of quantification of basic emotions and describe its implementation as a feedback in 3D virtual environments. 
The user emotions are recognized and visualized in real time on his/her avatar adding one more so-called “emotion dimension” to human computer interfaces.}, author = {Liu, Yisi and Sourina, Olga and Nguyen, Minh Khoa}, doi = {10.1109/CW.2010.37}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu, Sourina, Nguyen - 2010 - Real-Time EEG-Based Human Emotion Recognition and Visualization.pdf:pdf}, isbn = {978-1-4244-8301-3}, journal = {2010 International Conference on Cyberworlds}, keywords = {BCI,EEG,HCI,emotion recognition,emotion visualization,fractal dimension}, month = oct, pages = {262--269}, publisher = {IEEE}, title = {{Real-Time EEG-Based Human Emotion Recognition and Visualization}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5656346}, year = {2010} } @incollection{Gunes2009, author = {Gunes, Hatice and Piccardi, Massimo}, booktitle = {Advanced Information and Knowledge Processing}, chapter = {10}, editor = {Monekosso, Dorothy and Remagnino, Paolo and Kuno, Yoshinori}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gunes, Piccardi - 2009 - From Monomodal to Multimodal Affect Recognition Using Visual Modalities.pdf:pdf}, isbn = {9781848003453}, pages = {161--189}, publisher = {Springer-Verlag}, title = {{From Monomodal to Multimodal: Affect Recognition Using Visual Modalities}}, year = {2009} } @book{Mowrer1960, address = {New York}, author = {Mowrer, Orval Hobart}, pages = {555}, publisher = {Wiley}, title = {{Learning theory and behavior}}, year = {1960} } @article{DeRosis2006, abstract = {In this paper, we describe our experience with the design and implementation of an embodied conversational agent (ECA) that converses with users to change their dietary behavior. Our intent is to develop a system that dynamically models the agent and the user and adapts the agent's counseling dialog accordingly.
Towards this end, we discuss our efforts to automatically determine the user's dietary behavior stage of change and attitude towards the agent on the basis of unconstrained typed text dialog, first with another person and then with an ECA controlled by an experimenter in a wizard of Oz study. We describe how the results of these studies have been incorporated into an algorithm that combines the results from simple parsing rules together with contextual features using a Bayesian network to determine user stage and attitude automatically.}, author = {de Rosis, Fiorella and Novielli, Nicole and Carofiglio, Valeria and Cavalluzzi, Addolorata and {De Carolis}, Berardina}, doi = {10.1016/j.jbi.2006.01.001}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/de Rosis et al. - 2006 - User modeling and adaptation in health promotion dialogs with an animated character.pdf:pdf}, issn = {1532-0480}, journal = {Journal of biomedical informatics}, keywords = {Artificial Intelligence,Communication,Health Promotion,Health Promotion: methods,Humans,Information Storage and Retrieval,Information Storage and Retrieval: methods,User-Computer Interface}, month = oct, number = {5}, pages = {514--31}, pmid = {16524784}, title = {{User modeling and adaptation in health promotion dialogs with an animated character.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16524784}, volume = {39}, year = {2006} } @article{Robbins1994, abstract = {To date, cognitive and affective influences on performance evaluations have been addressed separately, although it is likely that affect may influence ratings indirectly through its impact on the cognitive processing involved in the evaluation. 83 management students participated in a study of the influence of affect on the cognitive processing of performance information. 
Results suggest that an affect-consistency bias influences ratings even though the cognitive processes that require some judgment indicated a bias toward both affect-consistent and affect-inconsistent performance. Additional findings suggest that the practical utility of affect as something distinct from past performance perceptions may be limited in field settings. Job-related affect, past performance perceptions, and social affect had similar influences on the cognitive process and ratings in performance evaluations. (PsycINFO Database Record (c) 2003 APA, all rights reserved)}, author = {Robbins, Tina L and DeNisi, Angelo S}, doi = {10.1037/0021-9010.79.3.341}, issn = {00219010}, journal = {Journal of Applied Psychology}, number = {3}, pages = {341--353}, publisher = {American Psychological Association}, title = {{A closer look at interpersonal affect as a distinct influence on cognitive processing in performance evaluations}}, url = {http://www.apa.org}, volume = {79}, year = {1994} } @article{Greenson1960, author = {Greenson, Ralph R}, journal = {The International Journal of Psycho-Analysis}, pages = {418--424}, title = {{Empathy and its vicissitudes}}, volume = {41}, year = {1960} } @article{Leslie1987, abstract = {One of the major developments of the second year of human life is the emergence of the ability to pretend. A child's knowledge of a real situation is apparently contradicted and distorted by pretense. If, as generally assumed, the child is just beginning to construct a system for internally representing such knowledge, why is this system of representation not undermined by its use in both comprehending and producing pretense? In this article I present a theoretical analysis of the representational mechanism underlying this ability. This mechanism extends the power of the infant's existing capacity for (primary) representation, creating a capacity for metarepresentation.
It is this, developing toward the end of infancy, that underlies the child's new abilities to pretend and to understand pretense in others. There is a striking isomorphism between the three fundamental forms of pretend play and three crucial logical properties of mental state expressions in language. This isomorphism points to a common underlying form of internal representation that is here called metarepresentation. A performance model, the decoupler, is outlined embodying ideas about how an infant might compute the complex function postulated to underlie pretend play. This model also reveals pretense as an early manifestation of the ability to understand mental states. Aspects of later preschool development, both normal and abnormal, are discussed in the light of the new model. This theory begins the task of characterizing the specific innate basis of our commonsense theory of mind.}, author = {Leslie, Alan M}, doi = {10.1037/0033-295X.94.4.412}, issn = {0033295X}, journal = {Psychological Review}, number = {4}, pages = {412--426}, publisher = {American Psychological Association}, title = {{Pretense and representation: The origins of "theory of mind."}}, url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0033-295X.94.4.412}, volume = {94}, year = {1987} } @article{Devoldre2010, abstract = {Social support researchers and clinicians have repeatedly expressed the need to identify the antecedents of social support provision within close relationships. The aim of the present study is to investigate the extent to which individual differences in cognitive empathy (perspective taking) and affective empathy (empathic concern and personal distress) are predictive of social support provision in couples. Study 1 involved 83 female participants in a relatively young relationship; Study 2 involved 128 married couples. The authors used self-report measures in both studies to assess individual differences in empathy and participants' support provision behaviors.
The main findings suggest a significant contribution of the different components of empathy with rather different pictures for each of these components. The authors discuss the present findings in light of existing theory and research on social support in relationships.}, author = {Devoldre, Inge and Davis, Mark H. and Verhofstadt, Lesley L and Buysse, Ann}, doi = {10.1080/00223981003648294}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Devoldre et al. - 2010 - Empathy and social support provision in couples social support and the need to study the underlying processes.pdf:pdf}, issn = {0022-3980}, journal = {The Journal of Psychology}, keywords = {80 and over,Adolescent,Adult,Affect,Aged,Empathy,Family Characteristics,Female,Humans,Individuality,Male,Middle Aged,Personal Construct Theory,Personality Inventory,Personality Inventory: statistics \& numerical data,Psychometrics,Social Support,Young Adult}, number = {3}, pages = {259--284}, pmid = {20461931}, title = {{Empathy and social support provision in couples: social support and the need to study the underlying processes.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21506454}, volume = {144}, year = {2010} } @incollection{Marsella2003, author = {Marsella, Stacy C and Gratch, Jonathan and Rickel, Jeff}, booktitle = {Life-Like Characters, Tools, Affective Functions, and Applications}, editor = {Prendinger, Helmut and Ishizuka, Mitsuru}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Marsella, Gratch, Rickel - 2003 - Expressive Behaviors for Virtual Worlds.pdf:pdf}, publisher = {Springer, Heidelberg}, title = {{Expressive Behaviors for Virtual Worlds}}, year = {2003} } @article{Jaques2007, abstract = {In this article we describe the use of mental states approach, more specifically the belief-desire-intention (BDI) model, to implement the process of affective diagnosis in an educational environment.
We use the psychological OCC model, which is based on the cognitive theory of emotions and is possible to be imple- mented computationally, in order to infer the learners emotions from his actions in the system interface. In our work we profit from the reasoning capacity of the BDI model in order to infer the students appraisal (a cognitive evaluation of a person that elicits an emotion), which allows us to deduce students emotions. The system reasons about an emotion-generating situation and tries to infer the users emotion by using the OCC model. Besides, the BDI model is very adequate to infer and also model students affective states since the emotions have a dynamic nature.}, author = {Jaques, Patricia Augustin and Vicari, Rosa Maria}, doi = {10.1016/j.compedu.2005.09.002}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jaques, Vicari - 2007 - A BDI approach to infer student’s emotions in an intelligent learning environment.pdf:pdf}, issn = {03601315}, journal = {Computers \& Education}, keywords = {architectures for educational technology,computer,distance education and telelearning,human,intelligent tutoring systems,interactive learning environments,interface,media in education,system}, month = sep, number = {2}, pages = {360--384}, title = {{A BDI approach to infer student’s emotions in an intelligent learning environment}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0360131505001302}, volume = {49}, year = {2007} } @article{Gratch2004, author = {Gratch, Jonathan and Marsella, Stacy C}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Marsella - 2004 - A domain-independent framework for modeling emotion.pdf:pdf;:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Marsella - 2004 - A domain-independent framework for modeling emotion(2).pdf:pdf}, journal = {Cognitive Systems Research}, number = {4}, pages 
= {269--306}, publisher = {Elsevier}, title = {{A domain-independent framework for modeling emotion}}, volume = {5}, year = {2004} } @article{Fretz1966, author = {Fretz, B. R.}, journal = {Journal of Counseling Psychology}, pages = {343}, title = {{Postural movements in a counseling dyad}}, volume = {13}, year = {1966} } @article{Mehrabian1996, author = {Mehrabian, Albert}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mehrabian - 1996 - Pleasure-arousal-dominance A general framework for describing and measuring individual differences in temperament.pdf:pdf}, journal = {Current Psychology}, number = {4}, pages = {261--292}, title = {{Pleasure-arousal-dominance: A general framework for describing and measuring individual differences in temperament}}, volume = {14}, year = {1996} } @article{Kendon2002, abstract = {A context-of-use study is reported of the `head shake'. A large number of examples are described and compared, drawn from video recordings of naturally occasioned interactions in the circumstances of everyday life, made in Campania, Italy, central England and the Eastern United States. Eight different kinds of uses for the head shake are illustrated. It is concluded that the head shake is not to be understood simply as the kinesic equivalent of a unit of verbal expression.
It appears as an expression in its own right which, furthermore, the speaker uses as a component in the construction of an utterance which, it seems, is so often a multimodal construction in which the different modalities of expression available are deployed by the speaker in the course of building a unit of expression according to the rhetorical needs of the interactive moment.}, author = {Kendon, Adam}, doi = {10.1075/gest.2.2.03ken}, journal = {Gesture}, keywords = {conversation,gesture,kinesics,multi-modalcommunication,negation}, number = {2}, pages = {147--182}, title = {{Some uses of the head shake}}, volume = {2}, year = {2002} } @inproceedings{Villagrasa2009, abstract = {In this paper we present a 3D facial animation system named FACe! It is able to generate different expressions of the face throughout punctual and combined activation of Action Units, defined by Facial Acting Coding System (FACS). This system is implemented on a 3D human head controlled by bones, riggers and skinning to deform the geometry. The bone system is implemented in order to move single or combined Action Units, so that they can implement superior layers such as expressions, phonemes, words, emotions and the synchronization of all them together.}, address = {Isla Margarita}, author = {Villagrasa, S and S\'{a}nchez, A Sus\'{\i}n}, booktitle = {IV Ibero-American Symposium in Computer Graphics (SIACG 2009)}, editor = {Rodr\'{\i}guez, O. and Ser\'{o}n, F. and Joan-Arinyo, R. and Madeiras, J. and Rodr\'{\i}guez, J. and Coto, E.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Villagrasa, S\'{a}nchez - 2009 - Face! 3d facial animation system based on facs.pdf:pdf}, pages = {203--209}, title = {{Face! 
3d facial animation system based on facs}}, url = {http://hdl.handle.net/2117/6944}, year = {2009} } @inproceedings{Liu2005a, abstract = {As an increasing number of new technologies are turning a strong focus on health assessment applications, new engineering and design challenges emerge. Challenges such as inference, modeling, data mining, and feedback for long-term usage arise. This paper argues that embedding empathy into the design of these interactive systems can potentially be vital in the acceptance and success of these types of technologies. This paper discusses three pieces of work that illustrate that designing systems that are intentionally empathetic can play a significant role in creating a better user experience in human-computer interactions.}, author = {Liu, K and Picard, Rosalind W.}, booktitle = {CHI Workshop on Challenges in Health Assessment}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu, Picard - 2005 - Embedded empathy in continuous, interactive health assessment.pdf:pdf}, pages = {1--4}, title = {{Embedded empathy in continuous, interactive health assessment}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.67.7721\&rep=rep1\&type=pdf}, year = {2005} } @article{Bailenson2005, abstract = {Previous research demonstrated social influence resulting from mimicry (the chameleon effect); a confederate who mimicked participants was more highly regarded than a confederate who did not, despite the fact that participants did not explicitly notice the mimicry. In the current study, participants interacted with an embodied artificial intelligence agent in immersive virtual reality. The agent either mimicked a participant's head movements at a 4-s delay or utilized prerecorded movements of another participant as it verbally presented an argument. 
Mimicking agents were more persuasive and received more positive trait ratings than nonmimickers, despite participants' inability to explicitly detect the mimicry. These data are uniquely powerful because they demonstrate the ability to use automatic, indiscriminate mimicking (i.e., a computer algorithm blindly applied to all movements) to gain social influence. Furthermore, this is the first study to demonstrate social influence effects with a nonhuman, nonverbal mimicker.}, author = {Bailenson, Jeremy N and Yee, Nick}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bailenson, Yee - 2005 - Digital Chameleons Automatic Assimilation of Nonverbal Gestures in Immersive Virtual Environments.pdf:pdf}, journal = {Psychological Science}, number = {10}, pages = {814--819}, title = {{Digital Chameleons: Automatic Assimilation of Nonverbal Gestures in Immersive Virtual Environments}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16181445}, volume = {16}, year = {2005} } @inproceedings{Alexander2009, author = {Alexander, Oleg and Rogers, Mike and Lambeth, William and Chiang, Matt and Debevec, Paul}, booktitle = {Conference for Visual Media Production (CVMP '09)}, doi = {10.1109/CVMP.2009.29}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alexander et al. 
- 2009 - Creating a Photoreal Digital Actor The Digital Emily Project.pdf:pdf}, isbn = {978-1-4244-5257-6}, month = nov, pages = {176--187}, publisher = {IEEE Computer Society}, title = {{Creating a Photoreal Digital Actor: The Digital Emily Project}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5430064}, year = {2009} } @inproceedings{Doherty2004, address = {Madison, WI}, author = {Doherty, William Joseph}, booktitle = {Policy Institute for Family Impact Seminars}, title = {{A family-focused approach to health care [Wisconsin Family Impact seminars]}}, year = {2004} } @article{Paiva2005a, author = {Paiva, Ana and Dias, Jo\~{a}o and Sobral, Daniel and Aylett, Ruth and Woods, Sarah and Hall, Lynne and Zoll, Carsten}, doi = {10.1080/08839510590910165}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Paiva et al. - 2005 - Learning By Feeling Evoking Empathy With Synthetic Characters(2).pdf:pdf}, issn = {0883-9514}, journal = {Applied Artificial Intelligence}, month = mar, number = {3-4}, pages = {235--266}, title = {{Learning By Feeling: Evoking Empathy With Synthetic Characters}}, url = {http://www.tandfonline.com/doi/abs/10.1080/08839510590910165}, volume = {19}, year = {2005} } @inproceedings{Simon2010, author = {Simon, Tomas and Nguyen, Minh Hoai and {De La Torre}, Fernando and Cohn, Jeffrey F.}, booktitle = {IEEE Computer Society Conference on Computer Vision and Pattern Recognition}, doi = {10.1109/CVPR.2010.5539998}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Simon et al. 
- 2010 - Action unit detection with segment-based SVMs.pdf:pdf}, isbn = {978-1-4244-6984-0}, month = jun, pages = {2737--2744}, publisher = {IEEE}, title = {{Action unit detection with segment-based SVMs}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5539998}, year = {2010} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @book{Osgood1990, author = {Osgood, C.E.}, publisher = {Praeger Publishers}, title = {{Language, Meaning, and Culture: The Selected Papers of C.E. Osgood}}, year = {1990} } @article{Fasel2002, author = {Fasel, Beat and Luettin, Juergen}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fasel, Luettin - 2002 - Automatic Facial Expression Analysis A Syrvey.pdf:pdf}, journal = {Pattern Recognition}, number = {1}, pages = {259--275}, title = {{Automatic Facial Expression Analysis: A Survey}}, volume = {36}, year = {2002} } @inproceedings{Bee2009, abstract = {Editing facial expressions of virtual characters is quite a complex task. The face is made up of many muscles, which are partly activated concurrently. Virtual faces with human expressiveness are usually designed with a limited amount of facial regulators. Such regulators are derived from the facial muscle parts that are concurrently activated. Common tools for editing such facial expressions use slider-based interfaces where only a single input at a time is possible.
Novel input devices, such as gamepads or data gloves, which allow parallel editing, could not only speed up editing, but also simplify the composition of new facial expressions. We created a virtual face with 23 facial controls and connected it with a slider-based GUI, a gamepad, and a data glove. We first conducted a survey with professional graphics designers to find out how the latter two new input devices would be received in a commercial context. A second comparative study with 17 subjects was conducted to analyze the performance and quality of these two new input devices using subjective and objective measurements.}, address = {Sanibel Island, Florida, USA}, author = {Bee, Nikolaus and Falk, Bernhard and Andre, Elisabeth}, booktitle = {Proceedings of the 14th international conference on Intelligent User Interfaces (IUI '09)}, doi = {10.1145/1502650.1502680}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bee, Falk, Andre - 2009 - Simplified Facial Animation Control Utilizing Novel Input Devices A Comparative Study.pdf:pdf}, isbn = {9781605583310}, pages = {197--206}, publisher = {ACM}, title = {{Simplified Facial Animation Control Utilizing Novel Input Devices : A Comparative Study}}, year = {2009} } @article{Russell1980, abstract = {Factor-analytic evidence has led most psychologists to describe affect as a set of dimensions, such as displeasure, distress, depression, excitement, and so on, with each dimension varying independently of the others. However, there is other evidence that rather than being independent, these affective dimensions are interrelated in a highly systematic fashion. The evidence suggests that these interrelationships can be represented by a spatial model in which affective concepts fall in a circle in the following order: pleasure (0), excitement (45), arousal (90), distress (135), displeasure (180), depression (225), sleepiness (270), and relaxation (315). 
This model was offered both as a way psychologists can represent the structure of affective experience, as assessed through self-report, and as a representation of the cognitive structure that laymen utilize in conceptualizing affect. Supportive evidence was obtained by scaling 28 emotion-denoting adjectives in 4 different ways: R. T. Ross's (1938) technique for a circular ordering of variables, a multidimensional scaling procedure based on perceived similarity among the terms, a unidimensional scaling on hypothesized pleasure–displeasure and degree-of-arousal dimensions, and a principal-components analysis of 343 Ss' self-reports of their current affective states. (70 ref) (PsycINFO Database Record (c) 2010 APA, all rights reserved)}, author = {Russell, James A}, doi = {10.1037/h0077714}, journal = {Journal of Personality and Social Psychology}, number = {6}, pages = {1161--1178}, title = {{A circumplex model of affect}}, url = {http://psycnet.apa.org/psycinfo/1981-25062-001}, volume = {39}, year = {1980} } @inproceedings{Strapparava2004, abstract = {In this paper we present a linguistic resource for the lexical representation of affective knowledge. This resource (named WORDNETAFFECT) was developed starting from WORDNET, through a selection and tagging of a subset of synsets representing the affective meanings.}, author = {Strapparava, Carlo and Valitutti, Alessandro}, booktitle = {Proceedings of LREC}, number = {March}, organization = {ELRA}, pages = {1083--1086}, publisher = {Citeseer}, title = {{WordNet-Affect: an affective extension of WordNet}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.122.4281\&rep=rep1\&type=pdf}, volume = {4}, year = {2004} } @article{Breitfuss2009, abstract = {This paper presents a system capable of automatically adding ges- tures to an embodied virtual character processing information from a simple text input. 
Gestures are generated based on the analysis of linguistic and contex- tual information of the input text. The system is embedded in the virtual world called second life and consists of an in world object and an off world server component that handles the analysis. Either a user controlled avatar or a non user controlled character can be used to display the gestures, that are timed with speech output from an Text-to-Speech system, and so show non verbal behavior without pushing the user to manually select it.}, author = {Breitfuss, Werner and Prendinger, Helmut and Ishizuka, Mitsuru}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Breitfuss, Prendinger, Ishizuka - 2009 - Automatic generation of non-verbal behavior for agents in virtual worlds A system for supportin.pdf:pdf}, journal = {Online Communities and Social Computing}, keywords = {animated agent systems,embodied virtual characters,multimodal,multimodal presentations,output generation,virtual worlds}, pages = {153--161}, title = {{Automatic generation of non-verbal behavior for agents in virtual worlds: A system for supporting multimodal conversations of bots and avatars}}, url = {http://www.springerlink.com/index/V8112R75N1830666.pdf}, volume = {LNCS 5621}, year = {2009} } @article{Shapiro1992, abstract = {Evaluated the effect of varied physician affect on subject recall, anxiety, and perceptions in a simulated tense and ambiguous medical situation. Forty women at risk for breast cancer viewed videotapes of an oncologist presenting-with either worried or nonworried affect-mammogram results. 
Although the mammogram results and the oncologist were the same in both presentation, analyses indicated that, compared to the women receiving the results from a nonworried physician, the women receiving the results from a worried physician recalled significantly less information, perceived the clinical situation as significantly more severe, reported significantly higher levels of state anxiety, and had significantly higher pulse rates. These results suggest that physician affect plays a critical role in patient reaction to medical information. Implications for compliance research, patient satisfaction, and physician training are discussed.}, author = {Shapiro, D E and Boggs, S R and Melamed, B G and Graham-Pole, J}, institution = {Department of Clinical and Health Psychology, University of Florida, Gainesville 32610.}, journal = {Health psychology official journal of the Division of Health Psychology American Psychological Association}, keywords = {adult,affect,anxiety,anxiety psychology,arousal,breast neoplasms,breast neoplasms genetics,breast neoplasms prevention \& control,breast neoplasms psychology,female,humans,mammography,mammography psychology,mental recall,middle aged,patient education topic,risk factors}, number = {1}, pages = {61--66}, pmid = {1559536}, title = {{The effect of varied physician affect on recall, anxiety, and perceptions in women at risk for breast cancer: an analogue study.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1559536}, volume = {11}, year = {1992} } @inproceedings{Sullins2009, abstract = {In this paper we explored the relationship between learning gains and affective displays of an animated pedagogical agent. Students read information on the topic of computer literacy while receiving either positive or negative affective responses from an on-screen animated agent. Analyses revealed that only students with low prior knowledge were influenced by the emotion displayed by the animated agent. 
We discuss the generalizability of our findings to other domains and the implications of these results on intelligent tutoring systems that are emotionally intelligent.}, author = {Sullins, Jeremiah and Craig, Scotty D and Graesser, Arthur C}, booktitle = {Proceedings of the 2009 conference on Artificial Intelligence in Education: Building Learning Systems that Care: From Knowledge Representation to Affective Modelling}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sullins, Craig, Graesser - 2009 - Tough Love The Influence of an Agent ’ s Negative Affect on Students ’ Learning.pdf:pdf}, keywords = {affect,animated agents,emotion,prior knowledge}, pages = {677--679}, publisher = {IOS Press Amsterdam, The Netherlands}, title = {{Tough Love : The Influence of an Agent ’ s Negative Affect on Students ’ Learning}}, year = {2009} } @inproceedings{Bickmore2009a, address = {Budapest, Hungary}, author = {Bickmore, Timothy and Schulman, Daniel}, booktitle = {Proceedings of 8th International Conference on Autonomous Agents and Multiagent Systems (AAMAS'09)}, editor = {Decker and Sichman and Sierra and Castelfranchi}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore, Schulman - 2009 - A virtual laboratory for studying long-term relationships between humans and virtual agents.pdf:pdf}, pages = {297--304}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{A virtual laboratory for studying long-term relationships between humans and virtual agents}}, url = {http://dl.acm.org/citation.cfm?id=1558054}, year = {2009} } @article{Tracy2009, abstract = {In 2 studies, the authors developed and validated of a new set of standardized emotion expressions, which they referred to as the University of California, Davis, Set of Emotion Expressions (UCDSEE). 
The precise components of each expression were verified using the Facial Action Coding System (FACS). The UCDSEE is the first FACS-verified set to include the three "self-conscious" emotions known to have recognizable expressions (embarrassment, pride, and shame), as well as the 6 previously established "basic" emotions (anger, disgust, fear, happiness, sadness, and surprise), all posed by the same 4 expressers (African and White males and females). This new set has numerous potential applications in future research on emotion and related topics.}, author = {Tracy, Jessica L and Robins, Richard W and Schriber, Roberta A}, institution = {Department of Psychology, University of British Columbia, Vancouver, Bristish Columbia, Canada. jltracy@psych.ubc.ca}, journal = {Emotion Washington Dc}, keywords = {africa,african americans,african americans psychology,cross cultural comparison,emotions,emotions classification,european continental ancestry group,european continental ancestry group psychology,facial expression,female,humans,judgment,judgment classification,male,nonverbal communication,nonverbal communication psychology,pattern recognition,posture,reference standards,self concept,sex factors,visual,visual classification,western,western ethnology}, number = {4}, pages = {554--559}, pmid = {19653779}, title = {{Development of a FACS-verified set of basic and self-conscious emotion expressions.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19653779}, volume = {9}, year = {2009} } @article{Lafrance1976, author = {Lafrance, Marianne and Broadbent, M.}, doi = {10.1177/105960117600100307}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lafrance, Broadbent - 1976 - Group Rapport Posture Sharing as a Nonverbal Indicator.pdf:pdf}, isbn = {1059601176}, issn = {1059-6011}, journal = {Group \& Organization Management}, month = sep, number = {3}, pages = {328--333}, title = {{Group Rapport: Posture Sharing as a Nonverbal Indicator}}, 
volume = {1}, year = {1976} } @article{Pelachaud1996, abstract = {This article reports results from a program that produces high-quality animation of facial expressions and head movements as automatically as possible in conjunction with meaning-based speech synthesis, including spoken intonation. The goal of the research is as much to test and define our theories of the formal semantics for such gestures, as to produce convincing animation. Towards this end, we have produced a high-level programming language for three-dimensional (3-D) animation of facial expressions. We have been concerned primarily with expressions conveying information correlated with the intonation of the voice: This includes the differences of timing, pitch, and emphasis that are related to such semantic distinctions of discourse as "focus," "topic," and "comment," "theme" and "rheme," or "given" and "new" information. We are also interested in the relation of affect or emotion to facial expression. Until now, systems have not embodied such rule-governed translation from spoken utterance meaning to facial expressions. Our system embodies rules that describe and coordinate these relations: ntonation/information, intonation/affect, and facial expressions/affect. A meaning representation includes discourse information: What is contrastive/background information in the given context, and what is the "topic" or "theme" of the discourse? The system maps the meaning representation into how accents and their placement are chosen, how they are conveyed over facial expression, and how speech and facial expressions are oordinated. This determines a sequence of functional groups: lip shapes, conversational signals, punctuators, regulators, and manipulators. Our algorithms then impose synchrony, create coarticulation effects, and determine affectual signals, eye and head movements. 
The lowest level representation is the Facial Action Coding System (FACS), which makes the generation system portable to other facial models.}, author = {Pelachaud, Catherine and Badler, Norman I and Steedman, Mark}, doi = {10.1207/s15516709cog2001\_1}, issn = {03640213}, journal = {Cognitive Science}, number = {1}, pages = {1--46}, publisher = {Elsevier}, title = {{Generating Facial Expressions for Speech}}, url = {http://www.sciencedirect.com/science/article/pii/S0364021399800019}, volume = {20}, year = {1996} } @article{Chavhan2010, author = {Chavhan, Yashpalsing and Dhore, M. L. and Yesaware, Pallavi}, doi = {10.5120/431-636}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chavhan, Dhore, Yesaware - 2010 - Speech Emotion Recognition using Support Vector Machine.pdf:pdf}, issn = {09758887}, journal = {International Journal of Computer Applications}, keywords = {emotion recognition,mfcc and,speech emotion,svm}, month = feb, number = {20}, pages = {8--11}, title = {{Speech Emotion Recognition using Support Vector Machine}}, url = {http://www.ijcaonline.org/journal/number20/pxc387636.pdf}, volume = {1}, year = {2010} } @article{Wehrle2000, author = {Wehrle, T. and Kaiser, S. and Schmidt, S. and Scherer, K. 
R.}, doi = {10.1037/0022-3514.78.1.105}, journal = {Journal of Personality and Social Psychology}, pages = {105--119}, title = {{Studying the dynamics of emotional expression using synthesized facial muscle movements}}, volume = {78}, year = {2000} } @phdthesis{Jacques1996, author = {Jacques, R.D.}, school = {South Bank University, London}, title = {{The Nature of Engagement and its Role in Hypermedia Evaluation and Design}}, type = {Doctoral Dissertation}, year = {1996} } @incollection{Gratch2006a, author = {Gratch, Jonathan and Mao, W and Marsella, Stacy C}, booktitle = {Cognition and Multi-Agent Interaction: From Cognitive Modeling to Social Simulation}, chapter = {9}, doi = {10.1017/CBO9780511610721.010}, editor = {Sun, Ron}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Mao, Marsella - 2006 - Modeling social emotions and social attributions.pdf:pdf;:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Mao, Marsella - 2006 - Modeling social emotions and social attributions(2).pdf:pdf}, isbn = {9780511610721}, pages = {219--251}, publisher = {Cambridge University Press}, title = {{Modeling social emotions and social attributions}}, year = {2006} } @inproceedings{Kang2009a, address = {Boston, Massachusetts, USA}, author = {Kang, Sin-hwa and Gratch, Jonathan and Watt, James H}, booktitle = {Proceedings of the SIGCHI Conference on Human Factors in Computing Systems 2009 (CHI'09)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kang, Gratch, Watt - 2009 - The Effect of Affective Iconic Realism on Anonymous Interactants ’ ’ Self-Disclosure.pdf:pdf}, isbn = {9781605582474}, keywords = {affective behavior,anonymity,anticipated future,avatar realism,embodied virtual agents,interaction,presence,rapport,self-disclosure,social,virtual humans}, publisher = {ACM Press}, title = {{The Effect of Affective Iconic
Realism on Anonymous Interactants’ Self-Disclosure}}, year = {2009} } @inproceedings{Valstar2010, address = {Malta}, author = {Valstar, M and Pantic, Maja}, booktitle = {Proceedings of Int’l Conf. Language Resources and Evaluation, Workshop on EMOTION}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Valstar, Pantic - 2010 - Induced Disgust, Happiness and Surprise an Addition to the MMI Facial Expression Database.pdf:pdf}, pages = {65--70}, title = {{Induced Disgust, Happiness and Surprise: an Addition to the MMI Facial Expression Database}}, url = {http://lrec.elra.info/proceedings/lrec2010/workshops/W24.pdf\#page=73}, year = {2010} } @incollection{Haq2010, address = {University of Surrey, UK}, author = {Haq, Sanaul and Jackson, Philip J B}, booktitle = {Machine Audition: Principles, Algorithms and Systems}, chapter = {17}, doi = {10.4018/978-1-61520-919-4.ch017}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Haq, Jackson - 2010 - Multimodal Emotion Recognition.pdf:pdf}, publisher = {Information Science Reference}, title = {{Multimodal Emotion Recognition}}, year = {2010} } @article{Li2012, author = {Li, Zheng and Mao, Xia}, doi = {10.1016/j.jvlc.2012.06.001}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li, Mao - 2012 - Emotional eye movement generation based on Geneva Emotion Wheel for virtual agents.pdf:pdf}, issn = {1045926X}, journal = {Journal of Visual Languages \& Computing}, keywords = {Computer animation,Eye movement synthesis,Human–computer interaction,Virtual agents}, month = oct, number = {5}, pages = {299--310}, publisher = {Elsevier}, title = {{Emotional eye movement generation based on Geneva Emotion Wheel for virtual agents}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1045926X1200047X}, volume = {23}, year = {2012} } @inproceedings{Lehmann2012, abstract = {Our research goal is to provide a
better understanding of how users engage with online services, and how to measure this engagement. We should not speak of one main approach to measure user engagement – e.g. through one fixed set of metrics – because engagement depends on the online services at hand. Instead, we should be talking of models of user engagement. As a first step, we analysed a number of online services, and show that it is possible to derive effectively simple models of user engagement, for example, accounting for user types and temporal aspects. This paper provides initial insights into engagement patterns, allowing for a better understanding of the important characteristics of how users repeatedly interact with a service or group of services.}, address = {Montreal, Canada}, author = {Lehmann, Janette and Lalmas, Mounia and Yom-Tov, Elad and Dupret, Georges}, booktitle = {Proceedings of the 20th international conference on User Modeling, Adaptation, and Personalization (UMAP'12)}, doi = {10.1007/978-3-642-31454-4\_14}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lehmann et al. 
- 2012 - Models of User Engagement.pdf:pdf}, pages = {164--175}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Models of User Engagement}}, url = {http://link.springer.com/chapter/10.1007/978-3-642-31454-4\_14}, year = {2012} } @inproceedings{Johnsen2010, address = {Philadelphia, PA}, author = {Johnsen, Kyle and Beck, Diane and Lok, Benjamin}, booktitle = {Proceedings of the 10th International Conference on Intelligent Virtual Agents (IVA'10)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Johnsen, Beck, Lok - 2010 - The Impact of a Mixed Reality Display Configuration on User Behavior With a Virtual Human.pdf:pdf}, keywords = {embodied agents,human-centered computing,virtual humans}, pages = {42--48}, publisher = {Springer}, title = {{The Impact of a Mixed Reality Display Configuration on User Behavior With a Virtual Human}}, url = {http://link.springer.com/chapter/10.1007/978-3-642-15892-6\_5}, year = {2010} } @article{Brown1996, abstract = {As part of a longitudinal study, 47 children who were tested on their understanding of basic emotions when they were 3 years old were tested again at 6 on their understanding of conflicting emotions. Significant stability in individual differences was found over this 3-year period. Antecedents to emotion understanding at 3 continued to be significantly related to children's understanding at 6, including participation in discourse about causality, positive interaction with older siblings, and language ability. Girls outperformed boys, and there was a suggestion that the girls' understanding of emotions was more closely associated than the boys' with the quality of their sibling relationships. Children's concurrent reports of negative experiences at home and at school were related to their sensitivity to the experience of ambivalent emotions. 
The results are discussed with respect to the social origins and implications of this core aspect of children's social-cognitive development.}, author = {Brown, J R and Dunn, J}, institution = {University of Texas at Austin, USA.}, journal = {Child Development}, keywords = {child,child development,cognitive dissonance,concept formation,emotions,female,gender identity,humans,longitudinal studies,male,preschool,sibling relations,social environment,socialization}, number = {3}, pages = {789--802}, pmid = {8706526}, title = {{Continuities in emotion understanding from three to six years.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8706526}, volume = {67}, year = {1996} } @book{Ekman1978, author = {Ekman, Paul and Freisen, Wallace V.}, booktitle = {Consulting Psychologists Press 1978}, editor = {Press, Consulting Psychologists}, publisher = {Consulting Psychologists Press}, title = {{Facial Action Coding System: A Technique for the Measurement of Facial Movement}}, year = {1978} } @incollection{Battista2000, abstract = {MPEG-4 (formally ISO/IEC international standard 14496) defines a multimedia system for the interoperable communication of complex scenes containing audio, video, synthetic audio and graphics material. In this article, we provide a comprehensive overview of the technical elements of the Moving Pictures Expert Group's MPEG-4 multimedia system specification}, author = {Battista, S and Casalino, F and Lande, C}, booktitle = {IEEE Multimedia}, doi = {10.1109/93.839314}, issn = {1070986X}, number = {1}, pages = {74--83}, publisher = {IEEE}, title = {{MPEG-4: a multimedia standard for the third millennium, Part 1.}}, volume = {7}, year = {2000} } @inproceedings{DeCarlo2002, abstract = {People highlight the intended interpretation of their utterances within a larger discourse by a diverse set of nonverbal signals. 
These signals represent a key chal- lenge for animated conversational agents because they are pervasive, variable, and need to be coordinated ju- diciously in an effective contribution to conversation. In this paper, we describe a freely-available cross-platform real-time facial animation system, RUTH, that animates such high-level signals in synchrony with speech and lip movements. RUTH adopts an open, layered archi- tecture in which fine-grained features of the animation can be derived by rule from inferred linguistic structure, allowing us to use RUTH, in conjunction with annota- tion of observed discourse, to investigate the meaningful high-level elements of conversational facial movement for American English speakers.}, author = {DeCarlo, D. and Revilla, C. and Stone, Matthew and Venditti, J.J.}, booktitle = {Proceedings of Computer Animation 2002 (CA 2002)}, doi = {10.1109/CA.2002.1017501}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/DeCarlo et al. - 2002 - Making discourse visible coding and animating conversational facial displays.pdf:pdf}, isbn = {0-7695-1594-0}, number = {Ca}, pages = {11--16}, publisher = {IEEE Comput. Soc}, title = {{Making discourse visible: coding and animating conversational facial displays}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1017501}, volume = {2002}, year = {2002} } @book{Laurel1993, address = {Reading, MA}, author = {Laurel, B.}, pages = {112--113}, publisher = {Addison-Wesley}, title = {{Computers as Theatre}}, year = {1993} } @article{VandenHaak2009, author = {van den Haak, Maaike J. and de Jong, Menno D.T. 
and Schellens, Peter Jan}, doi = {10.1016/j.giq.2007.11.003}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/van den Haak, de Jong, Schellens - 2009 - Evaluating municipal websites A methodological comparison of three think-aloud variants.pdf:pdf}, issn = {0740624X}, journal = {Government Information Quarterly}, month = jan, number = {1}, pages = {193--202}, publisher = {Elsevier Inc.}, title = {{Evaluating municipal websites: A methodological comparison of three think-aloud variants}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0740624X08001159}, volume = {26}, year = {2009} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{XinLuo2007, abstract = {The present study investigated the ability of normal-hearing listeners and cochlear implant users to recognize vocal emotions. Sentences were produced by 1 male and 1 female talker according to 5 target emotions: angry, anxious, happy, sad, and neutral. Overall amplitude differences between the stimuli were either preserved or normalized. In experiment 1, vocal emotion recognition was measured in normal-hearing and cochlear implant listeners; cochlear implant subjects were tested using their clinically assigned processors. 
When overall amplitude cues were preserved, normal-hearing listeners achieved near-perfect performance, whereas listeners with cochlear implant recognized less than half of the target emotions. Removing the overall amplitude cues significantly worsened mean normal-hearing and cochlear implant performance. In experiment 2, vocal emotion recognition was measured in listeners with cochlear implant as a function of the number of channels (from 1 to 8) and envelope filter cutoff frequency (50 vs 400 Hz) in experimental speech processors. In experiment 3, vocal emotion recognition was measured in normal-hearing listeners as a function of the number of channels (from 1 to 16) and envelope filter cutoff frequency (50 vs 500 Hz) in acoustic cochlear implant simulations. Results from experiments 2 and 3 showed that both cochlear implant and normal-hearing performance significantly improved as the number of channels or the envelope filter cutoff frequency was increased. The results suggest that spectral, temporal, and overall amplitude cues each contribute to vocal emotion recognition. 
The poorer cochlear implant performance is most likely attributable to the lack of salient pitch cues and the limited functional spectral resolution.}, author = {{Xin Luo} and Fu, Qian-Jie and Galvin, John J}, doi = {10.1177/1084713807305301}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Xin Luo, Fu, Galvin - 2007 - Vocal emotion recognition by normal-hearing listeners and cochlear implant users.pdf:pdf}, issn = {1084-7138}, journal = {Trends in amplification}, keywords = {Aged,Auditory Perception,Cochlear Implants,Cues,Emotions,Female,Hearing Disorders,Hearing Disorders: psychology,Hearing Disorders: surgery,Hearing Impaired Persons,Humans,Male,Middle Aged,Pitch Perception,Rehabilitation of Hearing Impaired,Speech Acoustics,Speech Perception,Time Factors}, month = dec, number = {4}, pages = {301--15}, pmid = {18003871}, title = {{Vocal emotion recognition by normal-hearing listeners and cochlear implant users.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=3210149\&tool=pmcentrez\&rendertype=abstract}, volume = {11}, year = {2007} } @article{Paiva2005, author = {Paiva, Ana and Dias, Jo\~{a}o and Sobral, Daniel and Aylett, Ruth and Woods, Sarah and Hall, Lynne and Zoll, Carsten}, doi = {10.1080/08839510590910165}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Paiva et al. - 2005 - Learning By Feeling Evoking Empathy With Synthetic Characters.pdf:pdf}, issn = {0883-9514}, journal = {Applied Artificial Intelligence}, month = mar, number = {3-4}, pages = {235--266}, title = {{Learning By Feeling: Evoking Empathy With Synthetic Characters}}, url = {http://www.tandfonline.com/doi/abs/10.1080/08839510590910165}, volume = {19}, year = {2005} } @article{Sharma1998, author = {Sharma, R. and Pavlovic, V.I. 
and Huang, Thomas S.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sharma, Pavlovic, Huang - 1998 - Toward multimodal human-computer interface.pdf:pdf}, journal = {Proceedings of the IEEE}, keywords = {computer interface,human,multimodality,sensor}, number = {5}, pages = {853--869}, publisher = {IEEE}, title = {{Toward multimodal human-computer interface}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=664275}, volume = {86}, year = {1998} } @inproceedings{Zakharov2007, abstract = {We describe the design and evaluation of an affective pedagogical agent persona for Intelligent Tutoring Systems. The goal of our research was to develop an agent embodying a persona of a caring mentor interested in the learner's progress. The agent's behaviour is guided by a set of rules that are triggered by the states of the session history. Four agents were integrated with EER-Tutor for a formative evaluation study. The mentor persona secured strong rapport with the users; the audible narration was seen as a strong feature of the agents.}, author = {Zakharov, Konstantin and Mitrovic, Antonija and Johnston, Lucy}, booktitle = {Proceedings of the 2007 conference on Artificial Intelligence in Education: Building Technology Rich Learning Contexts That Work}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zakharov, Mitrovic, Johnston - 2007 - Pedagogical Agents Trying on a Caring Mentor Role.pdf:pdf}, pages = {59--66}, publisher = {IOS Press Amsterdam, The Netherlands}, title = {{Pedagogical Agents Trying on a Caring Mentor Role}}, url = {http://dl.acm.org/citation.cfm?id=1563616}, year = {2007} } @article{Drolet2000, abstract = {We propose that face-to-face contact fosters the development of rapport and thereby helps negotiators coordinate on mutually beneficial settlements in mixed-motive conflicts. 
Specifically, we investigate whether, in a cooperative climate, negotiators visual access to each others nonverbal behavior fosters a dyadic state of rapport that facilitates mutual cooperation. Experiment 1 manipulated whether negotiators stood face-to-face or side-by- side (unable to see each other) in a simulated strike negotiation. Face-to-face dyads were more likely to coordinate on a settlement early in the strike, resulting in higher joint gains. An alternative interpretation in terms of an anticipatory effect of face-to-face contact was not supported. Experiment 2 manipulated whether previously unacquainted negotiators conversed face-to-face or by telephone before separating to play a conflict game with the structure of a Prisoners Dilemma game. Face-to-face dyads were more likely to coordinate on high joint gain outcomes. The facilitatory effect of face-to-face contact was statistically mediated by ameasure of dyadic rapport. Results did not support alternative interpretations based on individual-level positive affect or expectations about opponents. We conclude with a discussion of the role of affective and dyad-level processes in social psychological models of conflict resolution.}, author = {Drolet, Aimee L and Morris, Michael W}, doi = {10.1006/jesp.1999.1395}, issn = {00221031}, journal = {Journal of Experimental Social Psychology}, number = {1}, pages = {26--50}, publisher = {ACADEMIC PRESS INC}, title = {{Rapport in Conflict Resolution: Accounting for How Face-to-Face Contact Fosters Mutual Cooperation in Mixed-Motive Conflicts}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0022103199913951}, volume = {36}, year = {2000} } @inproceedings{Swartout2010, abstract = {To increase the interest and engagement of middle school students in science and technology, the \{InterFaces\} project has created virtual museum guides that are in use at the Museum of Science, Boston. 
The characters use natural language interaction and have near photoreal appearance to increase and presents reports from museum staff on visitor reaction.}, address = {Philadelphia, PA}, author = {Swartout, William and Traum, David and Artstein, Ron and Noren, Dan and Debevec, Paul and Bronnenkant, Kerry and Williams, Josh and Leuski, Anton and Narayanan, Shrikanth and Piepol, Diane and Lane, H. Chad and Morie, Jacquelyn and Aggarwal, Priti and Liewer, Matt and Chiang, Jen-Yuan and Gerten, Jillian and Chu, Selina and White, Kyle}, booktitle = {Proceedings of the 10th International Conference on Intelligent Virtual Agents (IVA 2010)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Swartout et al. - 2010 - Ada and Grace Toward Realistic and Engaging Virtual Museum Guides.pdf:pdf}, keywords = {Graphics,Learning Sciences,Virtual Humans,Virtual Worlds}, title = {{Ada and Grace: Toward Realistic and Engaging Virtual Museum Guides}}, url = {http://ict.usc.edu/pubs/ada and grace.pdf}, year = {2010} } @article{Warner1987, author = {Warner, Rebecca M. and Malloy, Daniel and Schneider, Kathy and Knoth, Russell and Wilder, Bruce}, doi = {10.1007/BF00990958}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Warner et al. - 1987 - Rhythmic organization of social interaction and observer ratings of positive affect and involvement.pdf:pdf}, issn = {0191-5886}, journal = {Journal of Nonverbal Behavior}, number = {2}, pages = {57--74}, title = {{Rhythmic organization of social interaction and observer ratings of positive affect and involvement}}, url = {http://www.springerlink.com/index/10.1007/BF00990958}, volume = {11}, year = {1987} } @article{Behrend2011, abstract = {In this study, trainees worked with computerized trainer agents that were either similar to them or different regarding appearance or feedback-giving style. 
Similarity was assessed objectively, based on appearance and feedback style matching, and subjectively, based on participants’ self-reported perceptions of similarity. Appearance similarity had few effects. Objective feedback similarity led to higher scores on a declarative knowledge test and higher liking for the trainer. Subjective feedback similarity was related to reactions, engagement, and liking for the trainer. Overall, results indicated that subjective similarity is more important in predicting training outcomes than objective similarity, and that surfacelevel similarity is less important than deep-level similarity. These results shed new light on the dynamics between e-learners and trainer agents, and inform the design of agent-based training.}, author = {Behrend, Tara S. and Thompson, Lori Foster}, doi = {10.1016/j.chb.2010.12.016}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Behrend, Thompson - 2011 - Similarity effects in online training Effects with computerized trainer agents.pdf:pdf}, issn = {07475632}, journal = {Computers in Human Behavior}, keywords = {Intelligent agents Similarity-attraction E-learnin}, month = may, number = {3}, pages = {1201--1206}, publisher = {Elsevier Ltd}, title = {{Similarity effects in online training: Effects with computerized trainer agents}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0747563211000033}, volume = {27}, year = {2011} } @inproceedings{Yasavur2013, abstract = {Named-Entity Recognizers (NERs) are an important part of information extraction systems in annotation tasks. Although substantial progress have been made in recognizing domain-independent named entities (e.g. location, organization and person), there is a need to rec- ognize named entities for domain-specific applications in order to extract relevant concepts. Due to the growing need for smart health applications, to address some of the latestworldwide epidemics of behavioral issues (e.g. 
over eating, lack of exercise, alcohol and drug consump- tion), we focused on the domain of behavior change, especially lifestyle change. To the best of our knowl- edge, there is no named-entity recognizer designed for the lifestyle change domain to enable applications to rec- ognize relevant concepts.We have designed an ontology for behavioral health based on which we developed a NER augmented with lexical resources. Our NER au- tomatically tags words and phrases in sentences with relevant domain-specific tags (e.g. [un/]healthy food, potentially-risky/healthy activity, drug, tobacco and al- coholic beverage). We tested our system with the man- ually collected test data. Our ontology also enables to make further information acquisition for the recognized named entities by using semantic reasoners.}, address = {St Petersburg, FL, USA}, author = {Yasavur, Ugan and Amini, Reza and Lisetti, Christine and Rishe, Naphtali}, booktitle = {In Proceedings of the 26th International FLAIRS Conference}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yasavur et al. - 2013 - Ontology-based Named Entity Recognizer for Behavioral Health.pdf:pdf}, publisher = {AAAI Press}, title = {{Ontology-based Named Entity Recognizer for Behavioral Health}}, url = {http://www.aaai.org/ocs/index.php/FLAIRS/FLAIRS13/paper/viewFile/5950/6076}, year = {2013} } @inproceedings{Bickmore2009, abstract = {Ninety million Americans have inadequate health literacy, resulting in a reduced ability to read and follow directions in the healthcare environment. We describe an animated, empathic virtual nurse interface for design rationale, and two Boston University School of Medicine Boston Medical Center brian.jack@bmc.org educating and counseling hospital patients with inadequate health literacy in their hospital beds at the time of discharge. The development methodology, iterations of user testing are described. 
Results indicate that hospital patients with low health literacy found the system easy to use, reported high levels of satisfaction, and most said they preferred receiving the discharge information from the agent over their doctor or nurse. Patients also expressed appreciation for the time and attention provided by the virtual nurse, and felt that it provided an additional authoritative source for their medical information.}, address = {New York}, author = {Bickmore, Timothy Wallace and Pfeifer, Laura M and Jack, Brian W}, booktitle = {Proceedings of the 27th international ACM conference on Human factors in computing systems (CHI'09)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore, Pfeifer, Jack - 2009 - Taking the Time to Care Empowering Low Health Literacy Hospital Patients with Virtual Nurse Agents.pdf:pdf}, isbn = {9781605582467}, keywords = {Access,Conversational Agent,Embodied,Health Literacy,Hospital Discharge,Patient Education,Patient Safety,Relational Agent,Universal}, pages = {1265--1274}, publisher = {ACM}, title = {{Taking the Time to Care : Empowering Low Health Literacy Hospital Patients with Virtual Nurse Agents}}, year = {2009} } @article{Eisenberg1983, abstract = {Reviews the literature on sex differences in empathy (defined as vicarious affective responding to the emotional state of another) and related capacities (affective role taking and decoding of nonverbal cues). The literature is discussed according to method used to assess empathy and affective role taking. Where appropriate, meta-analyses were also computed. In general, sex differences in empathy were found to be a function of the methods used to assess empathy. 
There was a large sex difference favoring women when the measure of empathy was self-report scales; moderate differences (favoring females) were found for reflexive crying and self-report measures in laboratory situations; and no sex differences were evident when the measure of empathy was either physiological or unobtrusive observations of nonverbal reactions to another's emotional state. Moreover, few sex differences were found for children's affective role taking and decoding abilities. (156 ref) (PsycINFO Database Record (c) 2006 APA, all rights reserved), (C) 1983 by the American Psychological Association}, author = {Eisenberg, Nancy and Lennon, Randy}, issn = {19391455}, journal = {Psychological Bulletin}, number = {1}, pages = {100--131}, title = {{Sex Differences in Empathy and Related Capacities}}, volume = {94}, year = {1983} } @article{Ochs2010, author = {Ochs, Magalie and Sadek, David and Pelachaud, Catherine}, doi = {10.1007/s10458-010-9156-z}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ochs, Sadek, Pelachaud - 2010 - A formal model of emotions for an empathic rational dialog agent.pdf:pdf}, issn = {1387-2532}, journal = {Autonomous Agents and Multi-Agent Systems}, keywords = {affective computing,dialog,emotions,empathy,rational dialog agent}, month = nov, title = {{A formal model of emotions for an empathic rational dialog agent}}, url = {http://www.springerlink.com/index/10.1007/s10458-010-9156-z}, year = {2010} } @article{Ekman1983, author = {Ekman, Paul and Levenson, Robert W and Freisen, Wallace V.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ekman, Levenson, Freisen - 1983 - Autonomic Nervous System Activity Distinguishes among Emotions.pdf:pdf}, journal = {Science}, number = {4616}, pages = {1208--1210}, title = {{Autonomic Nervous System Activity Distinguishes among Emotions}}, volume = {221}, year = {1983} } @inproceedings{Gama2011, abstract 
= {Over the last decade extensive research has been conducted in the area of conversational agents focusing in many different aspects of these agents. In this research, and aiming at building agents that maintain a social connection with users, empathy has been one of those areas, as it plays a leading role in the establishment of social relationships. In this paper we present a relationship model of empathy that takes advantage of Social Penetration Theory's concepts for relationship building. This model has been implemented into an agent that attempts to establish a relationship with the user, expressing empathy both verbally and visually. The visual expression of empathy consists of facial expression and physical proximity representation. The user tests performed showed that while users were able to develop a simple relationship with the agents, they however developed stronger relationships with a version of the agent that is most visually expressive and takes advantage of the proximity element, confirming the significance of our model based on social penetration theory may have and, consequently, the importance of the visual representation of empathic responses.}, address = {Memphis, TN, USA}, author = {Gama, Sandra and Barata, Gabriel and Gon\c{c}alves, D. and Prada, R. and Paiva, Ana}, booktitle = {ACII'11 Proceedings of the 4th international conference on Affective computing and intelligent interaction - Volume Part I}, doi = {10.1007/978-3-642-24600-5\_54}, editor = {D'Mello, Sidney K. and Graesser, Arthur C. and Schuller, Bj\"{o}rn and Martin, Jean-Claude}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gama et al.
- 2011 - SARA social affective relational agent a study on the role of empathy in artificial social agents.pdf:pdf}, keywords = {affective computing,conversational agent,empathic agent}, pages = {507--516}, publisher = {Springer Berlin / Heidelberg}, title = {{SARA: social affective relational agent: a study on the role of empathy in artificial social agents}}, url = {http://www.springerlink.com/content/g0433kx744258w62/}, year = {2011} } @article{Frampton2009, author = {Frampton, Matthew and Lemon, Oliver}, doi = {10.1017/S0269888909990166}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Frampton, Lemon - 2009 - Recent research advances in Reinforcement Learning in Spoken Dialogue Systems.pdf:pdf}, issn = {0269-8889}, journal = {The Knowledge Engineering Review}, month = dec, number = {04}, pages = {375}, title = {{Recent research advances in Reinforcement Learning in Spoken Dialogue Systems}}, url = {http://www.journals.cambridge.org/abstract\_S0269888909990166}, volume = {24}, year = {2009} } @inproceedings{Martinez-miranda2012, author = {Mart\'{\i}nez-miranda, Juan and Bres\'{o}, Adri\'{a}n and Garc\'{\i}a-g\'{o}mez, Juan Miguel}, booktitle = {Proceedings of the 4th International Conference on Agents and Artificial Intelligence}, doi = {10.5220/0003833302640269}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mart\'{\i}nez-miranda, Bres\'{o}, Garc\'{\i}a-g\'{o}mez - 2012 - Modelling Therapeutic Empathy in a Virtual Agent To Support the Remote Treatment of Major.pdf:pdf}, isbn = {978-989-8425-95-9}, pages = {264--269}, publisher = {SciTePress - Science and and Technology Publications}, title = {{Modelling Therapeutic Empathy in a Virtual Agent To Support the Remote Treatment of Major Depression}}, url = {http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0003833302640269}, year = {2012} } @article{Elfenbein2007, author = {Elfenbein, H. A. and Beaupre, M. 
and L{\'e}vesque, M. and Hess, U.}, journal = {Emotion}, number = {1}, pages = {131--146}, title = {{Toward a dialect theory: Cultural differences in the expression and recognition of posed facial expressions}}, volume = {7}, year = {2007} } @article{Breemen2005, abstract = {We developed a robotic research platform called "iCat" for studying social human-robot interaction. The platform consists of the robotic character "iCat", which is a desktop user-interface robot with mechanically rendered facial expressions. Recently, Philips Research made this platform available for universities and research laboratories to stimulate the momentum in Human-Robot Interaction research [5].}, author = {van Breemen, A and Yan, X}, doi = {10.1145/1082473.1082823}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Breemen, Yan - 2005 - iCat an animated user-interface robot with personality.pdf:pdf}, journal = {AAMAS '05 Proceedings of the fourth international joint conference on Autonomous agents and multiagent systems}, pages = {143--144}, title = {{iCat: an animated user-interface robot with personality}}, url = {http://dl.acm.org/citation.cfm?id=1082823}, year = {2005} } @inproceedings{Alexander2013, author = {Alexander, Oleg and Fyffe, Graham and Busch, Jay and Yu, Xueming and Ichikari, Ryosuke and Jones, Andrew and Debevec, Paul and Jimenez, Jorge and Danvoye, Etienne and Antionazzi, Bernardo and Eheler, Mike and Kysela, Zybnek and Pahlen, Javier}, booktitle = {ACM SIGGRAPH '13}, doi = {10.1145/2503385.2503387}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alexander et al.
- 2013 - Digital Ira creating a real-time photoreal digital actor.pdf:pdf}, pages = {4}, publisher = {ACM}, title = {{Digital Ira: creating a real-time photoreal digital actor}}, url = {http://dl.acm.org/citation.cfm?id=2503387}, year = {2013} } @article{Scherer2003, author = {Scherer, Klaus R.}, doi = {10.1016/S0167-6393(02)00084-5}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Scherer - 2003 - Vocal communication of emotion A review of research paradigms.pdf:pdf}, issn = {01676393}, journal = {Speech communication}, month = apr, number = {1-2}, pages = {227--256}, title = {{Vocal communication of emotion: A review of research paradigms}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0167639302000845 http://www.sciencedirect.com/science/article/pii/S0167639302000845}, volume = {40}, year = {2003} } @inproceedings{Becker2005, abstract = {This paper first describes two independently conducted research strands on affective human-computer interaction: one on an emotion simulation system for an expressive 3D humanoid agent called Max, which was designed at the University of Bielefeld; the other one on a real-time system for empathic (agent) feedback that is based on human emotional states derived from physiological information, and developed at the University of Tokyo and the National Institute of Informatics. Then, the integration of both systems is suggested for the purpose of realizing a highly believable agent with empathic qualities.}, address = {Takamatsu, Kagawa, Japan}, author = {Becker-Asano, Christian and Prendinger, Helmut and Ishizuka, M.}, booktitle = {Proceedings of the 2005 International Conference on Active Media Technology, 2005. 
(AMT 2005)}, doi = {10.1109/AMT.2005.1505417}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Becker-Asano, Prendinger, Ishizuka - 2005 - Empathy for Max.pdf:pdf}, isbn = {0780390350}, keywords = {embodied conversational agents,empathy}, pages = {541--545}, title = {{Empathy for Max}}, url = {http://www.techfak.uni-bielefeld.de/~cbecker/becker-helmut-amt05.pdf}, year = {2005} } @article{Barrett2007, abstract = {Experiences of emotion are content-rich events that emerge at the level of psychological description, but must be causally constituted by neurobiological processes. This chapter outlines an emerging scientific agenda for understanding what these experiences feel like and how they arise. We review the available answers to what is felt (i.e., the content that makes up an experience of emotion) and how neurobiological processes instantiate these properties of experience. These answers are then integrated into a broad framework that describes, in psychological terms, how the experience of emotion emerges from more basic processes. We then discuss the role of such experiences in the economy of the mind and behavior.}, author = {Barrett, Lisa Feldman and Mesquita, Batja and Ochsner, Kevin N and Gross, James J}, doi = {10.1146/annurev.psych.58.110405.085709}, editor = {Meyers, Editor-in-Chief Robert A}, institution = {Department of Psychology, Boston College, Chestnut Hill, Massachusetts 02467, USA. barretli$\backslash$@bc.edu}, isbn = {9780122274107}, issn = {00664308}, journal = {Annual Review of Psychology}, keywords = {affect,consciousness,emotion}, number = {1}, pages = {373--403}, pmid = {17002554}, publisher = {Annual Reviews}, title = {{The Experience of Emotion}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17002554}, volume = {58}, year = {2007} } @article{Hancock2007, abstract = {Our ability to express and accurately assess emotional states is central to human life.
The present study examines how people express and detect emotions during text-based communication, an environment that eliminates the nonverbal cues typically associated with emotion. The results from 40 dyadic interactions suggest that users relied on four strategies to express happiness versus sadness, including disagreement, negative affect terms, punctuation, and verbosity. Contrary to conventional wisdom, communication partners readily distinguished between positive and negative valence emotional communicators in this text-based context. The results are discussed with respect to the Social Information Processing model of strategic relational adaptation in mediated communication.}, author = {Hancock, Jeffrey T and Landrigan, Christopher and Silver, Courtney}, doi = {10.1145/1240624.1240764}, isbn = {9781595935939}, journal = {Proceedings of the SIGCHI conference on Human factors in computing systems CHI 07}, pages = {929}, publisher = {ACM Press}, series = {Proceedings of the SIGCHI conference on Human factors in computing systems}, title = {{Expressing emotion in text-based communication}}, url = {http://portal.acm.org/citation.cfm?doid=1240624.1240764}, year = {2007} } @article{Nisbett1977, abstract = {Reviews evidence which suggests that there may be little or no direct introspective access to higher order cognitive processes. Ss are sometimes (a) unaware of the existence of a stimulus that importantly influenced a response, (b) unaware of the existence of the response, and (c) unaware that the stimulus has affected the response. It is proposed that when people attempt to report on their cognitive processes, that is, on the processes mediating the effects of a stimulus on a response, they do not do so on the basis of any true introspection. Instead, their reports are based on a priori, implicit causal theories, or judgments about the extent to which a particular stimulus is a plausible cause of a given response. 
This suggests that though people may not be able to observe directly their cognitive processes, they will sometimes be able to report accurately about them. Accurate reports will occur when influential stimuli are salient and are plausible causes of the responses they produce, and will not occur when stimuli are not salient or are not plausible causes.}, author = {Nisbett, R E and Wilson, T D}, doi = {10.1037/0033-295X.84.3.231}, editor = {DeVivo, Anita and Silver, Amy and Felder, Deborah S and Hayward, Robert J and Patterson, Kendall C and Redman, Anne and Buchwald, Alexander and Falmagne, Rachel Jofffe and Krantz, David H and Olson, Gary M and Shiffrin, Richard M and Smith, Edward E and Theios, John and WIggins, Jerry S}, issn = {0033295X}, journal = {Psychological Review}, number = {3}, pages = {231--259}, pmid = {17882490}, publisher = {Psychol Rev}, title = {{Telling more than we can know: Verbal reports on mental processes}}, url = {http://psycnet.apa.org/journals/rev/84/3/231/}, volume = {84}, year = {1977} } @article{Gruen1986, author = {Gruen, Rand J. and Mendelsohn, Gerald}, doi = {10.1037/0022-3514.51.3.609}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gruen, Mendelsohn - 1986 - Emotional responses to affective displays in others The distinction between empathy and sympathy.pdf:pdf}, issn = {1939-1315}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {609--614}, title = {{Emotional responses to affective displays in others: The distinction between empathy and sympathy.}}, volume = {51}, year = {1986} } @article{Premack1978, abstract = {An individual has a theory of mind if he imputes mental states to himself and others. A system of inferences of this kind is properly viewed as a theory because such states are not directly observable, and the system can be used to make predictions about the behavior of others. 
As to the mental states the chimpanzee may infer, consider those inferred by our own species, for example, purpose or intention, as well as knowledge, belief, thinking, doubt, guessing, pretending, liking, and so forth. To determine whether or not the chimpanzee infers states of this kind, we showed an adult chimpanzee a series of videotaped scenes of a human actor struggling with a variety of problems. Some problems were simple, involving inaccessible food bananas vertically or horizontally out of reach, behind a box, and so forth as in the original Kohler problems; others were more complex, involving an actor unable to extricate himself from a locked cage, shivering because of a malfunctioning heater, or unable to play a phonograph because it was unplugged. With each videotape the chimpanzee was given several photographs, one a solution to the problem, such as a stick for the inaccessible bananas, a key for the locked up actor, a lit wick for the malfunctioning heater. The chimpanzee's consistent choice of the correct photographs can be understood by assuming that the animal recognized the videotape as representing a problem, understood the actor's purpose, and chose alternatives compatible with that purpose.}, author = {Premack, D and Woodruff, G}, doi = {10.1017/S0140525X00076512}, issn = {0140525X}, journal = {Behavioral and Brain Sciences}, number = {04}, pages = {515--526}, pmid = {18424224}, publisher = {Cambridge Univ Press}, title = {{Does the chimpanzee have a theory of mind?}}, url = {http://journals.cambridge.org/abstract\_S0140525X00076512}, volume = {1}, year = {1978} } @article{Lang1995, abstract = {Emotions are action dispositions--states of vigilant readiness that vary widely in reported affect, physiology, and behavior. They are driven, however, by only 2 opponent motivational systems, appetitive and aversive--subcortical circuits that mediate reactions to primary reinforcers. 
Using a large emotional picture library, reliable affective psychophysiologies are shown, defined by the judged valence (appetitive/pleasant or aversive/unpleasant) and arousal of picture percepts. Picture-evoked affects also modulate responses to independently presented startle probe stimuli. In other words, they potentiate startle reflexes during unpleasant pictures and inhibit them during pleasant pictures, and both effects are augmented by high picture arousal. Implications are elucidated for research in basic emotions, psychopathology, and theories of orienting and defense. Conclusions highlight both the approach's constraints and promising paths for future study.}, author = {Lang, P J}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lang - 1995 - The emotion probe. Studies of motivation and attention.pdf:pdf}, issn = {0003-066X}, journal = {The American psychologist}, keywords = {Affect,Arousal,Attention,Blinking,Humans,Mental Disorders,Mental Disorders: psychology,Motivation,Startle Reaction}, month = may, number = {5}, pages = {372--385}, pmid = {7762889}, title = {{The emotion probe.
Studies of motivation and attention.}}, volume = {50}, year = {1995} } @book{Knapp2009, abstract = {The most comprehensive, most readable compendium of research and theory on nonverbal communication available, NONVERBAL COMMUNICATION IN HUMAN INTERACTION uses the cross-disciplinary approaches of speech and social psychology to focus on how nonverbal communication research affects a wide variety of academic interests.}, author = {Knapp, Mark L and Hall, Judith A}, booktitle = {Dress as NonVerbal Communication}, editor = {Burgoon, Michael}, isbn = {9780495568698}, pages = {496}, publisher = {Wadsworth, Cengage Learning}, title = {{Nonverbal Communication in Human Interaction}}, url = {http://books.google.com/books?id=j5HIIfRUPm0C}, volume = {5}, year = {2009} } @book{Wolfgang1979, address = {New York}, author = {Wolfgang, A.}, publisher = {Academic Press}, title = {{Nonverbal behavior: Applications and cultural implications}}, year = {1979} } @article{Cassell2000, abstract = {Embodied conversational agents are computer-generated cartoon-like characters that demonstrate many of the same properties as humans in face-to-face conversation, including the ability to produce and respond to verbal and nonverbal communication. They constitute a type of (a) multimodal interface where the modalities are those natural to human conversation: speech, facial displays, hand gestures, and body stance; (b) software agent, insofar as they represent the computer in an interaction with a human or represent their human users in a computational environment (as avatars, for example); and (c) dialogue system where both verbal and nonverbal devices advance and regulate the dialogue between the user and the computer. With an embodied conversational agent, the visual dimension of interacting with an animated character on a screen plays an intrinsic role. 
Not just pretty pictures, the graphics display visual features of conversation in the same way that the face and hands do in face-to-face conversation among humans.This book describes research in all aspects of the design, implementation, and evaluation of embodied conversational agents as well as details of specific working systems. Many of the chapters are written by multidisciplinary teams of psychologists, linguists, computer scientists, artists, and researchers in interface design. The authors include Elisabeth Andre, Norm Badler, Gene Ball, Justine Cassell, Elizabeth Churchill, James Lester, Dominic Massaro, Cliff Nass, Sharon Oviatt, Isabella Poggi, Jeff Rickel, and Greg Sanders.}, author = {Cassell, Justine and Sullivan, Joseph and Prevost, Scott and Churchill, Elizabeth F.}, chapter = {Evaluation}, doi = {10.1027/1864-9335.40.1.26}, editor = {Cassell, Justine and Sullivan, Joseph and Prevost, Scott and Churchill, Elizabeth}, isbn = {0262032783}, issn = {18649335}, journal = {Social Psychology}, number = {1}, pages = {26--36}, publisher = {MIT Press}, title = {{Embodied Conversational Agents}}, url = {http://psycontent.metapress.com/openurl.asp?genre=article\&id=doi:10.1027/1864-9335.40.1.26}, volume = {40}, year = {2000} } @article{Breazeal2005, author = {Breazeal, Cynthia and Buchsbaum, Daphna and Gray, Jesse and Gatenby, David and Blumberg, Bruce}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Breazeal et al. 
- 2005 - Learning from and about others Towards using imitation to bootstrap the social understanding of others by robot.pdf:pdf}, journal = {Artificial Life}, number = {1-2}, pages = {31--62}, publisher = {MIT Press}, title = {{Learning from and about others: Towards using imitation to bootstrap the social understanding of others by robots}}, volume = {11}, year = {2005} } @book{Lang1997, abstract = {A simple and rapid liquid chromatographic assay for the evaluation of potentially counterfeit oseltamivir (Tamiflu has been developed and assessed. The assay uses approximately 1mg Tamiflu powder when used for authentication and content estimate. The procedure was validated using 50 replicates analysed during five independent series with a total R.S.D. of 11.2\%. The assay can also be used to monitor the exact content of oseltamivir in Tamiflu capsules. One Tamiflu capsule was transferred to a 250mL volumetric flask and 150mL water was added. The flask was placed in an ultrasonic bath at 40 degrees C for 20min to dissolve the capsule. The solution was allowed to cool to room temperature before the flask was filled up to the mark (250mL). A small aliquot was centrifuged and then directly injected into the LC-system for quantification. Oseltamivir was analysed by liquid chromatography with UV detection on a Hypersil Gold column (150mmx4.6mm) using a mobile phase containing methanol-phosphate buffer (pH 2.5; 0.1M) (50:50, v/v) at a flow rate of 1.0mL/min. 
The assay was implemented for the analysis of Tamiflu purchased over the Internet and at local pharmacies in Thailand and Vietnam.}, author = {Lang, P J and Bradley, Margaret M and Cuthbert, B N}, booktitle = {Psychology}, doi = {10.1016/j.epsr.2006.03.016}, institution = {University of Florida}, issn = {07317085}, number = {4}, pages = {1--5}, pmid = {8625375}, publisher = {The Center for Research in Psychophysiology, University of Florida}, title = {{International Affective Picture System (IAPS): Technical Manual and Affective Ratings}}, url = {http://www.unifesp.br/dpsicobio/adap/instructions.pdf}, volume = {77}, year = {1997} } @book{Csikszentmihalyi1990, address = {New York}, author = {Cs\'{\i}kszentmih\'{a}lyi, M.}, publisher = {Harper and Row}, title = {{Flow: The Psychology of Optimal Experience}}, year = {1990} } @inproceedings{Sebe2005b, author = {Sebe, Nicu and Cohen, Ira and Gevers, Theo and Huang, Thomas S.}, booktitle = {Proceedings of the Society of Photo-Optical Instrumentation Engineers, Vol. 5670}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sebe et al. - 2005 - Multimodal approaches for emotion recognition a survey.pdf:pdf}, keywords = {emotion recognition,human-computer interaction,multimodal approach}, pages = {56--67}, title = {{Multimodal approaches for emotion recognition: a survey}}, url = {http://disi.unitn.it/~sebe/PUBS/PDF/2005/sebeSPIE2005a.pdf}, year = {2005} } @article{Stockwell1994, abstract = {The concept of the Alcohol Dependence Syndrome has been influential in the field of alcohol studies in the 1980s. The Severity of Alcohol Dependence Questionnaire (SADQ) is one of a generation of alcohol problem scales developed to measure degree of dependence rather than presence or absence of 'alcoholism'.
This paper describes the development of a form of the SADQ for community samples of drinkers (SADQ-C) and its relationship to a brief scale designed to measure impaired control over drinking. In a sample of 52 problem drinkers, SADQ and SADQ-C correlated almost perfectly (r = 0.98). In a larger sample of 197 attenders at a controlled drinking clinic, Principal Components Analysis revealed one major factor accounting for 71.7\% of the total variance. High internal reliability was indicated with a Cronbach's Alpha of 0.98. Application of this instrument in a random survey of Western Australian households is then described. It was necessary to remove items relating to 'reinstatement of dependence' for this sample. A single major factor was identified by principal components analysis, accounting for 69.1\% of the total variance. In both the clinic and the community samples SADQ-C scores correlated highly with Impairment of Control scores. The findings are interpreted as supporting the view that there is a single dimension of alcohol dependence upon which all persons who drink alcohol with any regularity may be located.}, author = {Stockwell, T and Sitharthan, T and McGrath, D and Lang, E}, institution = {National Centre for Research into the Prevention of Drug Abuse, Curtin University of Technology, Perth, Western Australia.}, journal = {Addiction Abingdon England}, keywords = {adolescent,adult,aged,alcohol drinking,alcohol drinking adverse effects,alcohol drinking epidemiology,alcohol drinking psychology,alcoholism,alcoholism classification,alcoholism diagnosis,alcoholism epidemiology,alcoholism psychology,cross sectional studies,female,humans,incidence,internal external control,male,middle aged,psychometrics,reproducibility results,substance withdrawal syndrome,substance withdrawal syndrome classification,substance withdrawal syndrome diagnosis,substance withdrawal syndrome epidemiology,substance withdrawal syndrome psychology,western australia,western australia 
epidemiology}, number = {2}, pages = {167--174}, pmid = {8173482}, title = {{The measurement of alcohol dependence and impaired control in community samples.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8173482}, volume = {89}, year = {1994} } @article{Dieker2009, abstract = {A process was developed to create Web-based video models of effective instructional practices for use in teacher education settings. Three video models, created at three university sites, demonstrated exemplary implementation of specific, evidence-based strategies in reading, math, and science. Video models of strategies were field tested with preservice and practicing teachers working with diverse student populations. The authors provide an explanation of the video development process and present field-test data that demonstrate the influence of video modeling on teacher learning.}, author = {Dieker, Lisa A. and Lane, Holly B. and Allsopp, David H. and O'Brien, Chris and Wright, Tyran and Kyger, Maggie and Lovin, LouAnn and Fenty, Nicole S.}, journal = {Teacher Education and Special Education}, number = {2}, pages = {180--196}, title = {{Evaluating Video Models of Evidence-Based Instructional Practices to Enhance Teacher Learning}}, volume = {32}, year = {2009} } @inproceedings{Stoyanchev2011, author = {Stoyanchev, Svetlana and Piwek, Paul and Prendinger, Helmut}, booktitle = {Intelligent Virtual Agents}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Stoyanchev, Piwek, Prendinger - 2011 - Comparing modes of information presentation text versus ECA and single versus two ECAs.pdf:pdf}, pages = {377--383}, publisher = {Springer}, title = {{Comparing modes of information presentation: text versus ECA and single versus two ECAs}}, url = {http://www.springerlink.com/index/H72521305G072173.pdf}, year = {2011} } @article{Barrett2006, abstract = {Laypeople and scientists alike believe that they know anger, or sadness, or fear, when they see it. 
These emotions and a few others are presumed to have specific causal mechanisms in the brain and properties that are observable (on the face, in the voice, in the body, or in experience)—that is, they are assumed to be natural kinds. If a given emotion is a natural kind and can be identified objectively, then it is possible to make discoveries about that emotion. Indeed, the scientific study of emotion is founded on this assumption. In this article, I review the accumulating empirical evidence that is inconsistent with the view that there are kinds of emotion with boundaries that are carved in nature. I then consider what moving beyond a natural-kind view might mean for the scientific understanding of emotion.}, author = {Barrett, Lisa Feldman}, doi = {10.1111/j.1745-6916.2006.00003.x}, journal = {Perspectives on Psychological Science}, pages = {28--58}, title = {{Are Emotions Natural Kinds?}}, volume = {1}, year = {2006} } @article{Wehrle1992, author = {Wehrle, Thomas}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wehrle - 1992 - Automated Coding of Facial Behavior in Human-Computer Interactions with FACS.pdf:pdf}, journal = {Nonverbal Behavior}, number = {2}, pages = {67--84}, title = {{Automated Coding of Facial Behavior in Human-Computer Interactions with FACS}}, volume = {16}, year = {1992} } @misc{Dalsgaard2013, author = {Dalsgaard, Christian}, title = {{Social software: E-learning beyond learning management systems}}, url = {eurodl.org}, urldate = {2013-12-17}, year = {2013} } @inproceedings{Smith2010, abstract = {The development of Embodied Conversational Agents (ECA) as Companions brings several challenges for both affective and conversational dialogue. These include challenges in generating appropriate affective responses, selecting the overall shape of the dialogue, providing prompt system response times and handling interruptions. 
We present an implementation of such a Companion showing the development of individual modules that attempt to address these challenges. Further, to resolve resulting conflicts, we present encompassing interaction strategies that attempt to balance the competing requirements. Finally, we present dialogues from our working prototype to illustrate these interaction strategies in operation.}, author = {Smith, Cameron and Crook, Nigel and Boye, Johan and Charlton, Daniel and Dobnik, Simon and Pizzi, David and Cavazza, Marc and Pulman, Stephen}, booktitle = {IVA'10 Proceedings of the 10th international conference on Intelligent virtual agents}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Smith et al. - 2010 - Interaction Strategies for an Affective Conversational Agent.pdf:pdf}, keywords = {affective dialogue,companion,conversational dialogue,embodied conversational agents,interaction strategies,interruptions}, pages = {301--314}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{Interaction Strategies for an Affective Conversational Agent}}, year = {2010} } @inproceedings{Legaspi2008, author = {Legaspi, Roberto and Kurihara, Satoshi and Fukui, K.I. and Moriyama, Koichi and Numao, Masayuki}, booktitle = {Human system interactions, 2008 conference on}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Legaspi et al. 
- 2008 - An empathy learning problem for HSI To be empathic, self-improving and ambient.pdf:pdf}, isbn = {1424415438}, keywords = {empathic computing,interfaces,machine learning,user modeling and user-adaptive}, pages = {209--214}, publisher = {IEEE}, title = {{An empathy learning problem for HSI: To be empathic, self-improving and ambient}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4581435}, year = {2008} } @inproceedings{Cassell2001, abstract = {The Behavior Expression Animation Toolkit (BEAT) allows animators to input typed text that they wish to be spoken by an animated human figure, and to obtain as output appropriate and synchronized nonverbal behaviors and synthesized speech in a form that can be sent to a number of different animation systems. The nonverbal behaviors are assigned on the basis of actual linguistic and contextual analysis of the typed text, relying on rules derived from extensive research into human conversational behavior. The toolkit is extensible, so that new rules can be quickly added. 
It is designed to plug into larger systems that may also assign personality profiles, motion characteristics, scene constraints, or the animation styles of particular animators.}, author = {Cassell, Justine and Vilhj\'{a}lmsson, HH and Bickmore, Timothy Wallace}, booktitle = {Proceedings of the 28th annual conference on Computer graphics and interactive techniques (SIGGRAPH '01)}, doi = {10.1145/383259.383315}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cassell, Vilhj\'{a}lmsson, Bickmore - 2001 - BEAT the behavior expression animation toolkit.pdf:pdf}, keywords = {animation systems,facial animation,gesture,speech synthesis}, pages = {477--486}, publisher = {ACM}, title = {{BEAT: the behavior expression animation toolkit}}, url = {http://dl.acm.org/citation.cfm?id=383315}, year = {2001} } @incollection{Bevacqua2010, author = {Bevacqua, Elisabetta and Prepin, Ken and Niewiadomski, Radoslaw and de Sevin, Etienne and Pelachaud, Catherine}, booktitle = {Close Engagement with Artificial Companions: Key social, psychological, ethical and design issues}, editor = {Wilks, Yorick}, pages = {143--156}, publisher = {John Benjamins Publishing Co.}, title = {{GRETA : Towards an Interactive Conversational Virtual Companion}}, year = {2010} } @article{Bickmore2010, abstract = {We describe an animated, conversational computer agent designed to promote antipsychotic medication adherence among patients with schizophrenia. In addition to medication adherence, the agent also promotes physical activity and system usage, and includes verbal and nonverbal behavior designed to foster a therapeutic alliance with patients. We discuss special considerations in designing interventions for this patient population, and challenges in developing and evaluating conversational agents in the mental health domain. 
Results from a pilot evaluation study of the agent indicate that it is accepted and effective.}, author = {Bickmore, Timothy Wallace and Puskar, Kathryn and Schlenk, Elizabeth A and Pfeifer, Laura M and Sereika, Susan M}, issn = {09535438}, journal = {Interacting with Computers}, keywords = {embodied conversational agent,health behavior change,longitudinal study,patient adherence,psychiatric nursing,schizophrenia}, number = {4}, pages = {276--288}, title = {{Maintaining reality: Relational agents for antipsychotic medication adherence}}, url = {http://www.sciencedirect.com/science/article/pii/S095354381000010X}, volume = {22}, year = {2010} } @article{Munhall2004, abstract = {People naturally move their heads when they speak, and our study shows that this rhythmic head motion conveys linguistic information. Three-dimensional head and face motion and the acoustics of a talker producing Japanese sentences were recorded and analyzed. The head movement correlated strongly with the pitch (fundamental frequency) and amplitude of the talker's voice. In a perception study, Japanese subjects viewed realistic talking-head animations based on these movement recordings in a speech-in-noise task. The animations allowed the head motion to be manipulated without changing other characteristics of the visual or acoustic speech. Subjects correctly identified more syllables when natural head motion was present in the animation than when it was eliminated or distorted. These results suggest that nonverbal gestures such as head movements play a more direct role in the perception of speech than previously known.}, author = {Munhall, K G and Jones, Jeffery A and Callan, Daniel E and Kuratate, Takaaki and Vatikiotis-Bateson, Eric}, institution = {Department of Psychology, Queen's University, Kingston, Ontario, Canada. 
munhallk@psyc.queensu.ca}, journal = {Psychological Science}, keywords = {adult,biomechanics,facial expression,female,gestures,head movements,humans,imaging,male,perceptual distortion,phonetics,semantics,sound localization,sound spectrography,speech acoustics,speech intelligibility,speech perception,three dimensional,user computer interface}, number = {2}, pages = {133--137}, pmid = {14738521}, publisher = {SAGE Publications}, title = {{Visual prosody and speech intelligibility: head movement improves auditory speech perception.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/14738521}, volume = {15}, year = {2004} } @article{LaFrance1982, abstract = {The relationship between client-perceived rapport (as measured from a standardized client) and physical mirroring and the standard counsellor posture was investigated with interviews performed by 59 post-graduate students (47 females and 12 males, aged 21-60 yrs) in counselling psychology. Videotaped recordings were used to code counsellor posture in the categories of: total postural mirroring, mirroring of the hands and arms, mirroring of the legs, mirroring of the torso, and the frequency of the standard counsellor posture across each minute of the interviews. These minutes were classified as 'high' in rapport or 'low' in rapport as measured by the standardized client. Results indicated that there was significantly more postural mirroring of the torso during high versus low minutes, but that the counsellor standard posture occurred significantly more frequently during low rapport minutes than in high rapport minutes. However, when examined over the entire length of the interviews, these data were able to be understood in terms of counsellor 'flexibility' of response rather than simply whether these postural behaviors were present or not. Implications for counsellor training are discussed. 
(PsycINFO Database Record (c) 2009 APA}, author = {Lafrance, Marianne}, doi = {10.1080/09515070110088843}, issn = {09515070}, journal = {Counselling Psychology Quarterly}, number = {4}, pages = {267--280}, publisher = {Human Sciences Press}, title = {{Posture mirroring and rapport}}, volume = {14}, year = {1982} } @incollection{Catucci2006, abstract = {Empathy is a distributed environment for the generation of emotions and other related affective phenomena like moods and temperaments. Empathy has been conceived as an object-oriented reusable framework entirely written in Java and realized for the purpose of studying the direct influences of emotions on behaviors and on decision-making processes of autonomous agents, interacting in complex or real environments. It allows for the realization of custom emotional agents, usable in several different domains, from the educational applications (e.g. entertainment, video games, intelligent tutoring systems.) to control systems in autonomous robots.}, author = {Catucci, Graziano and Abbattista, Fabio and Gadaleta, R. and Guaccero, Domenico and Semeraro, Giovanni}, booktitle = {Applied Soft Computing Technologies: The Challenge of Complexity}, doi = {10.1007/3-540-31662-0\_21}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Catucci et al. - 2006 - Empathy A computational framework for emotion generation.pdf:pdf}, isbn = {978-3-540-31649-7}, keywords = {autonomous agents,emotional agents,synthetic characters}, pages = {265--277}, publisher = {Springer Berlin / Heidelberg}, title = {{Empathy: A computational framework for emotion generation}}, url = {http://www.springerlink.com/index/LJ26L065L0072722.pdf http://dx.doi.org/10.1007/3-540-31662-0\_21}, year = {2006} } @inproceedings{Lucey2010, abstract = {In 2000, the Cohn-Kanade (CK) database was released for the purpose of promoting research into automatically detecting individual facial expressions. 
Since then, the CK database has become one of the most widely used test-beds for algorithm development and evaluation. During this period, three limitations have become apparent: 1) While AU codes are well validated, emotion labels are not, as they refer to what was requested rather than what was actually performed, 2) The lack of a common performance metric against which to evaluate new algorithms, and 3) Standard protocols for common databases have not emerged. As a consequence, the CK database has been used for both AU and emotion detection (even though labels for the latter have not been validated), comparison with benchmark algorithms is missing, and use of random subsets of the original database makes meta-analyses difficult. To address these and other concerns, we present the Extended Cohn-Kanade (CK+) database. The number of sequences is increased by 22\% and the number of subjects by 27\%. The target expression for each sequence is fully FACS coded and emotion labels have been revised and validated. In addition to this, non-posed sequences for several types of smiles and their associated metadata have been added. We present baseline results using Active Appearance Models (AAMs) and a linear support vector machine (SVM) classifier using a leave-one-out subject cross-validation for both AU and emotion detection for the posed data. The emotion and AU labels, along with the extended image data and tracked landmarks will be made available July 2010.}, address = {San Francisco, CA}, author = {Lucey, Patrick and Cohn, Jeffrey F and Kanade, Takeo and Saragih, Jason and Ambadar, Zara and Matthews, Iain and Ave, Forbes}, booktitle = {Computer Vision and Pattern Recognition Workshops (CVPRW)}, doi = {10.1109/CVPRW.2010.5543262}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lucey et al. 
- 2010 - The Extended Cohn-Kanade Dataset ( CK ) A complete dataset for action unit and emotion-specified expression.pdf:pdf}, number = {July}, pages = {94 -- 101}, publisher = {IEEE}, title = {{The Extended Cohn-Kanade Dataset ( CK + ): A complete dataset for action unit and emotion-specified expression}}, year = {2010} } @article{Hess1998, author = {Hess, Ursula and Philippot, Pierre and Blairy, Sylvie}, doi = {10.1080/026999398379547}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hess, Philippot, Blairy - 1998 - Facial Reactions to Emotional Facial Expressions Affect or Cognition.pdf:pdf}, issn = {0269-9931}, journal = {Cognition \& Emotion}, month = jul, number = {4}, pages = {509--531}, title = {{Facial Reactions to Emotional Facial Expressions: Affect or Cognition?}}, url = {http://www.tandfonline.com/doi/abs/10.1080/026999398379547}, volume = {12}, year = {1998} } @inproceedings{Hyun2007, author = {Hyun, KH and Kim, EH}, booktitle = {16th IEEE International Conference on Robot \& Human Interactive Communication}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hyun, Kim - 2007 - Emotional feature extraction based on phoneme information for speech emotion recognition.pdf:pdf}, isbn = {9781424416356}, pages = {802--806}, title = {{Emotional feature extraction based on phoneme information for speech emotion recognition}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4415195}, year = {2007} } @article{Hodgson2002, abstract = {Using the Alcohol Use Disorders Identification Test (AUDIT) as the gold standard, the Fast Alcohol Screening Test (FAST) was developed for use in busy medical settings. AUDIT questionnaires were completed by 666 patients in two London accident \& emergency (A\&E) departments. 
Using a principal components analysis, as well as sensitivity and specificity indices, a two-stage screening test was developed, using four of the AUDIT items. The first stage involved one item that identified >50\% of patients as either hazardous or non-hazardous drinkers. The second stage made use of the other three items to categorize the rest. The performance of this four-item questionnaire was then tested across a range of settings. Opportunistic samples of 100 patients completed AUDIT questionnaires in each of the following National Health Service settings: A\&E department, fracture clinic, primary health centre and a dental hospital. It was concluded that the four-item FAST questionnaire had good sensitivity and specificity, across a range of settings, when the AUDIT score was used as the gold standard. The FAST questionnaire is quick to administer, since >50\% of patients are categorized using just one question.}, author = {Hodgson, Ray and Alwyn, Tina and John, Bev and Thom, Betsy and Smith, Alyson}, institution = {University of Wales College of Medicine, Lansdowne Hospital, Cardiff CF11 8PL, UK.}, journal = {Alcohol and Alcoholism}, number = {1}, pages = {61--66}, publisher = {Oxford University Press}, title = {{The fast alcohol screening test.}}, url = {http://eprints.mdx.ac.uk/129/}, volume = {37}, year = {2002} } @incollection{Mora1999, abstract = {Beliefs-Desires-Intentions models (or BDI models) of agents have been around for quite a long time. The purpose of these models is to characterize agents using anthropomorphic notions, such as mental states and actions. However, despite the fact that many systems have been developed based on these models, it is a general concern that there is a gap between those powerful BDI logics and practical systems. The purpose of this paper is to present a BDI model that, besides being a formal model of agents, is also suitable to be used to implement agents. 
Instead of defining a new BDI logic or choosing an existing one, and extending it with an operational model, we define the notions of belief, desires and intentions using a logic formalism that is both well-defined and computational.}, author = {Mora, M and Lopes, J and Viccari, R and Coelho, H}, booktitle = {Intelligent Agents V: Agents Theories, Architectures, and Languages}, chapter = {Section I}, doi = {10.1007/3-540-49057-4\_2}, editor = {Muller, J.P.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mora et al. - 1999 - BDI models and systems Reducing the gap.pdf:pdf}, keywords = {BDI models,agent architectures,logic programming,mental states modeling}, pages = {11--27}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{BDI models and systems: Reducing the gap}}, url = {http://www.springerlink.com/index/m674631247x60251.pdf}, year = {1999} } @book{Greene2003, abstract = {Providing a thorough review and synthesis of work on communication skills and skill enhancement, this "Handbook" serves as a comprehensive and contemporary survey of theory and research on social interaction skills. Editors John O. Greene and Brant R. Burleson have brought together preeminent researchers and writers to contribute to this volume, establishing a foundation on which future study and research will build. The handbook chapters are organized into five major units: general theoretical and methodological issues (models of skill acquisition, methods of skill assessment); fundamental interaction skills (both transfunctional and transcontextual); function-focused skills (informing, persuading, supporting); skills used in management of diverse personal relationships (friendships, romances, marriages); and skills used in varied venues of public and professional life (managing leading, teaching). 
Distinctive features of this handbook include: broad, comprehensive treatment of work on social interaction skills and skill acquisition; up-to-date reviews of research in each area; and emphasis on empirically supported strategies for developing and enhancing specific skills. Researchers in communication studies, psychology, family studies, business management, and related areas will find this volume a comprehensive, authoritative source on communications skills and their enhancement, and it will be essential reading for scholars and students across the spectrum of disciplines studying social interaction.}, author = {Greene, John O and Burleson, Brant Raney}, booktitle = {Communication}, editor = {Greene, John O and Burleson, Brant R}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Greene, Burleson - 2003 - Handbook of Communication and Social Interaction Skills.pdf:pdf}, isbn = {0805834176}, publisher = {Lawrence Erlbaum Associates, Inc., Publishers}, title = {{Handbook of Communication and Social Interaction Skills}}, year = {2003} } @inproceedings{Lisetti2012, address = {Miami, FLorida}, author = {Lisetti, Christine L}, booktitle = {IHI2012 International Health Informatics Sysmposium}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lisetti - 2012 - 10 Advantages of using Avatars in Patient-Centered Computer-based Interventions for Behavior Change.pdf:pdf}, title = {{10 Advantages of using Avatars in Patient-Centered Computer-based Interventions for Behavior Change}}, year = {2012} } @incollection{Ostermann2002, author = {Ostermann, J\"{o}rn}, booktitle = {MPEG-4 Facial Animation: The Standard, Implementation and Applications}, chapter = {2}, editor = {Pandzic, Igor and Forchheimer, Robert}, isbn = {978-0-470-84465-6}, pages = {17--55}, publisher = {Wiley}, title = {{Face Animation in MPEG-4}}, year = {2002} } @inproceedings{Vinciarelli2008, address = {Chania, Crete, 
Greece}, author = {Vinciarelli, Alessandro and Pantic, Maja and Bourlard, Herv\'{e} and Pentland, Alex}, booktitle = {Proceedings of the 10th international conference on Multimodal interfaces - IMCI '08}, doi = {10.1145/1452392.1452405}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vinciarelli et al. - 2008 - Social signals, their function, and automatic analysis.pdf:pdf}, isbn = {9781605581989}, keywords = {computer vision,definitely a key ability,difference between,life being involved in,social behaviour anal-,social intelligence is,social interactions,social signal processing,speech analysis,that can make the,ysis}, pages = {61}, publisher = {ACM Press}, title = {{Social signals, their function, and automatic analysis}}, url = {http://portal.acm.org/citation.cfm?doid=1452392.1452405}, year = {2008} } @article{Fontaine1992, author = {Fontaine, G.}, journal = {Presence: Teleoperators and Virtual Environments}, number = {4}, pages = {482--490}, title = {{The experience of a sense of presence in intercultural and international encounters}}, volume = {1}, year = {1992} } @book{McDougall1926, address = {Boston}, author = {McDougall, William}, publisher = {Luce}, title = {{An introduction to social psychology}}, year = {1926} } @inproceedings{Morency2005, address = {New York, New York, USA}, author = {Morency, Louis-Philippe and Sidner, Candace and Lee, Christopher and Darrell, Trevor}, booktitle = {Proceedings of the 7th International Conference on Multimodal interfaces (ICMI '05)}, doi = {10.1145/1088463.1088470}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Morency et al. 
- 2005 - Contextual recognition of head gestures.pdf:pdf}, isbn = {1595930280}, keywords = {context-based recognition,dialog context,embodied conversa-,head gestures,human-computer interaction,tional agent}, publisher = {ACM Press}, title = {{Contextual recognition of head gestures}}, url = {http://portal.acm.org/citation.cfm?doid=1088463.1088470}, year = {2005} } @phdthesis{Bui2004, author = {Bui, The Duy}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bui - 2004 - Creating Emotions and Facial Expressions for Embodied Agents.pdf:pdf}, isbn = {907529610X}, pages = {109--114}, school = {University of Twente}, title = {{Creating Emotions and Facial Expressions for Embodied Agents}}, type = {PhD Thesis}, year = {2004} } @inproceedings{Gonsior2011, abstract = {In this paper, the impact of facial expressions on HRI is explored. To determine their influence on empathy of a human towards a robot and perceived subjective performance, an experimental setup is created, in which participants engage in a dialog with the robot head EDDIE. The web-based gaming application “Akinator” serves as a backbone for the dialog structure. In this game, the robot tries to guess a thought-of person chosen by the human by asking various questions about the person. In our experimental evaluation, the robot reacts in various ways to the human's facial expressions, either ignoring them, mirroring them, or displaying its own facial expression based on a psychological model for social awareness. In which way this robot behavior influences human perception of the interaction is investigated by a questionnaire. 
Our results support the hypothesis that the robot behavior during interaction heavily influences the extent of empathy by a human towards a robot and perceived subjective task-performance, with the adaptive modes clearly leading compared to the non-adaptive mode.}, address = {Atlanta, GA, USA}, author = {Gonsior, Barbara and Sosnowski, Stefan and Mayer, Christoph and Blume, Jiirgen and Radig, B. and Wollherr, D. and Kuhnlenz, K.}, booktitle = {RO-MAN, 20th IEEE International Symposium on Robot and Human Interactive Communication}, doi = {10.1109/ROMAN.2011.6005294}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gonsior et al. - 2011 - Improving aspects of empathy and subjective performance for HRI through mirroring facial expressions.pdf:pdf}, isbn = {9781457715730}, pages = {350--356}, publisher = {IEEE}, title = {{Improving aspects of empathy and subjective performance for HRI through mirroring facial expressions}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6005294}, year = {2011} } @article{Tulsky2011, abstract = {BACKGROUND: Quality cancer care requires addressing patients' emotions, which oncologists infrequently do. Multiday courses can teach oncologists skills to handle emotion; however, such workshops are long and costly. OBJECTIVE: To test whether a brief, computerized intervention improves oncologist responses to patient expressions of negative emotion. DESIGN: Randomized, controlled, parallel-group trial stratified by site, sex, and oncologic specialty. Oncologists were randomly assigned to receive a communication lecture or the lecture plus a tailored CD-ROM. (ClinicalTrials.gov registration number: NCT00276627) SETTING: Oncology clinics at a comprehensive cancer center and Veterans Affairs Medical Center in Durham, North Carolina, and a comprehensive cancer center in Pittsburgh, Pennsylvania. 
PARTICIPANTS: 48 medical, gynecologic, and radiation oncologists and 264 patients with advanced cancer. INTERVENTION: Oncologists were randomly assigned in a 1:1 ratio to receive an interactive CD-ROM about responding to patients' negative emotions. The CD-ROM included tailored feedback on the oncologists' own recorded conversations. MEASUREMENTS: Postintervention audio recordings were used to identify the number of empathic statements and responses to patients' expressions of negative emotion. Surveys evaluated patients' trust in their oncologists and perceptions of their oncologists' communication skills. RESULTS: Oncologists in the intervention group used more empathic statements (relative risk, 1.9 [95\% CI, 1.1 to 3.3]; P = 0.024) and were more likely to respond to negative emotions empathically (odds ratio, 2.1 [CI, 1.1 to 4.2]; P = 0.028) than control oncologists. Patients of intervention oncologists reported greater trust in their oncologists than did patients of control oncologists (estimated mean difference, 0.1 [CI, 0.0 to 0.2]; P = 0.036). There was no significant difference in perceptions of communication skills. LIMITATIONS: Long-term effects were not examined. The findings may not be generalizable outside of academic medical centers. CONCLUSION: A brief computerized intervention improves how oncologists respond to patients' expressions of negative emotions. PRIMARY FUNDING SOURCE: National Cancer Institute.}, author = {Tulsky, James a and Arnold, Robert M and Alexander, Stewart C and Olsen, Maren K and Jeffreys, Amy S and Rodriguez, Keri L and Skinner, Celette Sugg and Farrell, David and Abernethy, Amy P and Pollak, Kathryn I}, doi = {10.7326/0003-4819-155-9-201111010-00007}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Tulsky et al. 
- 2011 - Enhancing communication between oncologists and patients with a computer-based training program a randomized tria.pdf:pdf}, issn = {1539-3704}, journal = {Annals of internal medicine}, keywords = {Clinical Competence,Communication,Computer-Assisted Instruction,Depression,Depression: etiology,Empathy,Humans,Medical Oncology,Medical Oncology: education,Neoplasms,Neoplasms: psychology,Patients,Patients: psychology,Physician-Patient Relations,Single-Blind Method,Software,Stress, Psychological,Stress, Psychological: etiology,Trust}, month = nov, number = {9}, pages = {593--601}, pmid = {22041948}, title = {{Enhancing communication between oncologists and patients with a computer-based training program: a randomized trial.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=3368370\&tool=pmcentrez\&rendertype=abstract}, volume = {155}, year = {2011} } @article{Hine2009, author = {Hine, Michael J. and Murphy, Steven a. and Weber, Michael and Kersten, Gregory}, doi = {10.1007/s10726-008-9151-9}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hine et al. - 2009 - The Role of Emotion and Language in Dyadic E-negotiations.pdf:pdf}, issn = {0926-2644}, journal = {Group Decision and Negotiation}, keywords = {computer mediated communication,electronic negotiation,emotion,logistic regression}, month = jan, number = {3}, pages = {193--211}, title = {{The Role of Emotion and Language in Dyadic E-negotiations}}, url = {http://www.springerlink.com/index/10.1007/s10726-008-9151-9}, volume = {18}, year = {2009} } @article{Bertakis1991, abstract = {The results of previous studies on the relationship between patient satisfaction and specific interviewing behaviors have been difficult to generalize because most studies have examined small samples of patients at one clinical location, and have used initial or acute care visits where the patient and physician did not have an established relationship. 
The present collaborative study of medical interviewing provided an opportunity to collect interviews from 550 return visits to 127 different physicians at 11 sites across the country. Tape recordings were analyzed using the Roter Interaction Analysis System, and postvisit satisfaction questionnaires were administered to patients. A number of significant relationships were found between communication during the visit and the various dimensions of patient satisfaction. Physician question asking about biomedical topics (both open- and closed-ended questions) was negatively related to patient satisfaction; however, physician question asking about psychosocial topics was positively related. Physician counseling for psychosocial issues was also positively related to patient satisfaction. Similarly, patient talk about biomedical topics was negatively related to satisfaction, while patient talk regarding psychosocial topics was positively related. Furthermore, patients were less satisfied when physicians dominated the interview by talking more or when the emotional tone was characterized by physician dominance. 
The findings suggest that patients are most satisfied by interviews that encourage them to talk about psychosocial issues in an atmosphere that is characterized by the absence of physician domination.}, author = {Bertakis, K D and Roter, Debra L and Putnam, S M}, institution = {Department of Family Practice, University of California, Davis, Sacramento 95817.}, journal = {The Journal of family practice}, number = {2}, pages = {175--181}, pmid = {1990046}, title = {{The relationship of physician medical interview style to patient satisfaction.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1990046}, volume = {32}, year = {1991} } @inproceedings{Charniak2000, abstract = {We present a new parser for parsing down to Penn tree-bank style parse trees that achieves 90.1\% average precision/recall for sentences of length 40 and less, and 89.5\% for sentences of length 100 and less when trMned and tested on the previously established [5,9,10,15,17] "stan- dard" sections of the Wall Street Journal tree- bank. This represents a 13\% decrease in er- ror rate over the best single-parser results on this corpus [9]. The major technical innova- tion is tire use of a "ma\~{}ximum-entropy-inspired" model for conditioning and smoothing that let us successfully to test and combine many differ- ent conditioning events. 
We also present some partial results showing the effects of different conditioning information, including a surpris- ing 2\% improvement due to guessing the lexical head's pre-terminal before guessing the lexical head.}, author = {Charniak, Eugene}, booktitle = {1st North American chapter of the Association for Computational Linguistics conference (NAACL' 2000)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Charniak - 2000 - A Maximum-Entropy-Inspired Parser.pdf:pdf}, number = {c}, pages = {132--139}, publisher = {Association for Computational Linguistics Stroudsburg, PA, USA}, title = {{A Maximum-Entropy-Inspired Parser}}, year = {2000} } @inproceedings{Melo2010, address = {Philadelphia, PA}, author = {de Melo, Celso M. and Carnevale, Peter and Gratch, Jonathan}, booktitle = {Proceedings of the 10th International Conference on Intelligent Virtual Agents (IVA'10)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Melo, Carnevale, Gratch - 2010 - The Influence of Emotions in Embodied Agents on Human Decision-Making.pdf:pdf}, keywords = {cooperation,decision making,embodied agents,emotion}, pages = {357--370}, publisher = {Springer}, title = {{The Influence of Emotions in Embodied Agents on Human Decision-Making}}, year = {2010} } @article{Saunier2010, author = {Saunier, Julien and Jones, Hazael and Lourdeaux, Domitile}, doi = {10.1109/WI-IAT.2010.255}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Saunier, Jones, Lourdeaux - 2010 - Empathy and Placebo for Autonomous Agents.pdf:pdf}, isbn = {978-1-4244-8482-9}, journal = {2010 IEEE/WIC/ACM International Conference on Web Intelligence and Intelligent Agent Technology}, keywords = {emotions,empathy,multi-agent architecture,personality,placebo}, month = aug, pages = {277--282}, publisher = {Ieee}, title = {{Empathy and Placebo for Autonomous Agents}}, url = 
{http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5616059}, year = {2010} } @article{Moridis2012a, abstract = {—Empathetic behavior has been suggested to be one effective way for Embodied Conversational Agents (ECAs) to provide feedback to learners’ emotions. An issue that has been raised is the effective integration of parallel and reactive empathy. The aim of this study is to examine the impact of ECAs’ emotional facial and tone of voice expressions combined with empathetic verbal behavior when displayed as feedback to students’ fear, sad, and happy emotions in the context of a self-assessment test. Three identical female agents were used for this experiment: 1) an ECA performing parallel empathy combined with neutral emotional expressions, 2) an ECA performing parallel empathy displaying emotional expressions that were relevant to the emotional state of the student, and 3) an ECA performing parallel empathy by displaying relevant emotional expressions followed by emotional expressions of reactive empathy with the goal of altering the student’s emotional state. Results indicate that an agent performing parallel empathy displaying emotional expressions relevant to the emotional state of the student may cause this emotion to persist. 
Moreover, the agent performing parallel and then reactive empathy appeared to be effective in altering an emotional state of fear to a neutral one.}, author = {Moridis, Christos N and Economides, Anastasios A}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Moridis, Economides, Member - 2012 - Affective Learning Empathetic Agents with Emotional Facial and Tone of Voice Expressions.pdf:pdf}, journal = {IEEE Transactions on Affective Computing}, keywords = {empathy,intelligent agents,user interfaces,Computers and education}, number = {3}, pages = {260--272}, title = {{Affective Learning: Empathetic Agents with Emotional Facial and Tone of Voice Expressions}}, volume = {3}, year = {2012} } @book{McNeill1992, abstract = {What is the relation between gestures and speech? In terms of symbolic forms, of course, the spontaneous and unwitting gestures we make while talking differ sharply from spoken language itself. Whereas spoken language is linear, segmented, standardized, and arbitrary, gestures are global, synthetic, idiosyncratic, and imagistic. In Hand and Mind, David McNeill presents a bold theory of the essential unity of speech and the gestures that accompany it. This long-awaited, provocative study argues that the unity of gestures and language far exceeds the surface level of speech noted by previous researchers and in fact also includes the semantic and pragmatic levels of language. In effect, the whole concept of language must be altered to take into account the nonsegmented, instantaneous, and holistic images conveyed by gestures. McNeill and his colleagues carefully devised a standard methodology for examining the speech and gesture behavior of individuals engaged in narrative discourse.
A research subject is shown a cartoon like the 1950 Canary Row-a classic Sylvester and Tweedy Bird caper that features Sylvester climbing up a downspout, swallowing a bowling ball and slamming into a brick wall. After watching the cartoon, the subject is videotaped recounting the story from memory to a listener who has not seen the cartoon. Painstaking analysis of the videotapes revealed that although the research subjects-children as well as adults, some neurologically impaired-represented a wide variety of linguistic groupings, the gestures of people speaking English and a half dozen other languages manifest the same principles. Relying on data from more than ten years of research, McNeill shows thatgestures do not simply form a part of what is said and meant but have an impact on thought itself. He persuasively argues that because gestures directly transfer mental images to visible forms, conveying ideas that language cannot always express, we must examine language and gesture together to unveil the operations of the mind.}, author = {McNeill, David}, booktitle = {Library}, isbn = {0226561348}, pages = {423}, publisher = {University of Chicago Press}, series = {Psychology/cognitive science}, title = {{Hand and Mind: What Gestures Reveal about Thought}}, url = {http://uwashington.worldcat.org.offcampus.lib.washington.edu/title/hand-and-mind-what-gestures-reveal-about-thought/oclc/24379126\&referer=brief\_results}, year = {1992} } @article{Behrend2012, abstract = {It is increasingly common for people engaging in computer–mediated interactions to be accompanied by a digital avatar that represents them. Little is known, however, about how these avatars influence others’ impressions. We examine this question in the context of employment interviews. It is well known that attractive job candidates are afforded an advantage in traditional face-to-face job interviews. 
We investigate whether raters evaluating computer–mediated interviews will follow a similar pattern when a digital avatar represents the candidate. To investigate this question, we asked 374 raters to view an interview transcript that was accompanied by either a male or female avatar, applying for either a male or female gender-typed job. We found that candidates with more attractive avatars received more favorable interview ratings, regardless of job gender type. These findings support the notion that the ‘‘what is beautiful is good’’ stereotype influences interview ratings even in computer-mediated interviews; raters automatically apply the same heuristics to digital and non-digital faces.}, author = {Behrend, Tara S. and Toaddy, Steven and Thompson, Lori Foster and Sharek, David J.}, doi = {10.1016/j.chb.2012.06.017}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Behrend et al. - 2012 - The effects of avatar appearance on interviewer ratings in virtual employment interviews.pdf:pdf}, issn = {07475632}, journal = {Computers in Human Behavior}, keywords = {Computer–mediated interview Attractiveness bias Se,Virtual world}, month = jul, number = {6}, pages = {2128--2133}, publisher = {Elsevier Ltd}, title = {{The effects of avatar appearance on interviewer ratings in virtual employment interviews}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0747563212001677}, volume = {28}, year = {2012} } @article{Larsen1992, abstract = {TOC The structural bases of emotional behavior James R. Averill - Promises and problems with the circumplex model of emotion Randy J. Larsen and Edward Diener - The complexity of intensity Nico H. Frijda, Andrew Ortony, Joep Sonnemans, and Gerald L. Clore - The behavioral ecology and sociality of human faces Alan J. Firdlund - Appraisal as a cause of emotion Brian Parkinson and A.S.R. 
Manstead - Affective dynamics Robert Mauro - Cross-cultural similarities and differences in emotion and its representation Phillip R. Shaver, Shelley Wu, and Judith C. Schwartz - The process of emotional experience James D. Laird and Charles Bresler - Inhibitory effects of awareness on affective responding Robert F. Bornstein - A functional analysis of the role of mood in affective systems William N. Morris - Differentiating affect, mood, and emotion C. Daniel Batson, Laura L. Shaw, and Kathryn C. Oleson}, author = {Larsen, Randy J and Diener, Edward}, chapter = {2}, editor = {Clark, Margaret S}, isbn = {0803946139}, journal = {Review of Personality and Social Psychology}, number = {13}, pages = {25--59}, publisher = {Sage}, series = {Review of personality and social psychology; No. 13; 0270-1987}, title = {{Promises and problems with the circumplex model of emotion}}, url = {http://psycnet.apa.org/psycinfo/1992-97396-002}, volume = {13}, year = {1992} } @article{Stevenson2007, abstract = {The Affective Norms for English Words (ANEW) are a commonly used set of 1034 words characterized on the affective dimensions of valence, arousal, and dominance. Traditionally, studies of affect have used stimuli characterized along either affective dimensions or discrete emotional categories, but much current research draws on both of these perspectives. As such, stimuli that have been thoroughly characterized according to both of these approaches are exceptionally useful. In an effort to provide researchers with such a characterization of stimuli, we have collected descriptive data on the ANEW to identify which discrete emotions are elicited by each word in the set. Our data, coupled with previous characterizations of the dimensional aspects of these words, will allow researchers to control for or manipulate stimulus properties in accordance with both dimensional and discrete emotional views, and provide an avenue for further integration of these two perspectives. 
Our data have been archived at}, author = {Stevenson, Ryan A and Mikels, Joseph A and James, Thomas W}, institution = {Department of Psychological and Brain Sciences, Indiana University, Bloomington, Indiana 47405, USA. stevenra@indiana.edu}, journal = {Behavior Research Methods}, number = {1}, pages = {1020--1024}, pmid = {18183921}, publisher = {Psychonomic Society}, title = {{Characterization of the affective norms for English words by discrete emotional categories.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18183921}, volume = {40}, year = {2007} } @inproceedings{Arya2006, abstract = {Modern multimedia presentations are aggregations of objects with different types such as video and audio. Due to the importance of facial actions and expressions in verbal and non-verbal communication, the authors have proposed “face multimedia object” as a new higher-level media type that encapsulates all the requirements of facial animation for a face-based multimedia presentations within one single object. In this paper, Interactive Face Animation - Comprehensive Environment (iFACE) is described as a general-purpose software framework that implements the “face multimedia object” and provides the related functionality and tools for a variety of interactive applications such as games and online services. iFACE exposes programming interfaces and provides authoring and scripting tools to design a face object, define its behaviours, and animate it through static or interactive situations. The framework is based on four parameterized spaces of Geometry, Mood, Personality, and Knowledge that together form the appearance and behaviour of the face. 
iFACE can function as a common “face engine” for design and run-time environments to simplify the work of content and software developers.}, address = {Plzen, Czech Republic}, author = {Arya, Ali and DiPaola, Steve and Jefferies, Lisa and Enns, James T.}, booktitle = {14th International Conference on Computer Graphics, Visualization and Computer Vision (WSCG’2006)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Arya et al. - 2006 - Socially communicative characters for interactive applications.pdf:pdf}, isbn = {8086943038}, keywords = {and moving head,behavior,face animation,figure 1,game,geometry,interactive,parameter,personality,sample animated 3d heads,showing expressions,talking}, publisher = {UNION Agency – Science Press}, title = {{Socially communicative characters for interactive applications}}, url = {http://summit.sfu.ca/system/files/iritems1/577/dipaola-sociallycommunicative.pdf}, year = {2006} } @article{Davis1983, author = {Davis, Mark H.}, doi = {10.1037/0022-3514.44.1.113}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Davis - 1983 - Measuring individual differences in empathy Evidence for a multidimensional approach.pdf:pdf}, issn = {0022-3514}, journal = {Journal of Personality and Social Psychology}, number = {1}, pages = {113--126}, title = {{Measuring individual differences in empathy: Evidence for a multidimensional approach.}}, volume = {44}, year = {1983} } @book{Baron-Cohen2003, address = {New York}, author = {Baron-Cohen, S.}, pages = {2}, publisher = {Basic Books}, title = {{The essential difference: The truth about the male and female brain}}, year = {2003} } @inproceedings{DeCarolis2010, abstract = {As far as interaction is concerned Ambient Intelligence (AmI) research emphasizes the need of natural and friendly interfaces for accessing services provided by the environment. 
In this paper we present the result of an experimental study aiming at understanding whether Embodied Conversational Agents (ECAs) and Social Robots may improve the naturalness and effectiveness of interaction by playing different roles when acting as interface between users and smart environment services. Results obtained so far show that ECAs seem to have a better evaluation than robots for information related tasks. On the other side, Social Robots are preferred for welcoming people and for guiding them in the smart environment, due to their possibility to move and to the perceived sense of presence. Moreover, the robot seems to elicit a more positive evaluation in terms of user experience.}, address = {New York, New York, USA}, author = {{De Carolis}, Berardina and Mazzotta, Irene and Novielli, Nicole and Pizzutilo, Sebastiano}, booktitle = {Proceedings of the International Conference on Advanced Visual Interfaces - AVI '10}, doi = {10.1145/1842993.1843041}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/De Carolis et al. - 2010 - Social robots and ECAs for accessing smart environments services.pdf:pdf}, isbn = {9781450300766}, keywords = {animated interfaces,interface evaluation}, pages = {275--278}, publisher = {ACM Press}, title = {{Social robots and ECAs for accessing smart environments services}}, url = {http://portal.acm.org/citation.cfm?doid=1842993.1843041}, year = {2010} } @inproceedings{Amini2012, abstract = {In this article, we present HapFACS 1.0, a new software/API for generating static and dynamic three-dimensional facial expressions based on the Facial Action Coding System (FACS). HapFACS pro- vides total control over the FACS Action Units (AUs) activated at all levels of intensity. HapFACS allows generating faces with an individual AU or composition of AUs activated unilaterally or bilat- erally with different intensities. 
The reliable and emotionally valid facial expressions can be generated on infinite number of faces in different ethnicities, genders, and ages using HapFACS to be used in numerous scientific areas including psychology, emotion, FACS learning, clinical, and neuroscience research.}, address = {Vienna, AUSTRIA}, author = {Amini, Reza and Yasavur, U and Lisetti, Christine L}, booktitle = {Proceedings of the ACM 3rd International Symposium on Facial Analysis and Animation (FAA'12)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Amini, Yasavur, Lisetti - 2012 - HapFACS 1.0 SoftwareAPI for Generating FACS-Based Facial Expressions.pdf:pdf}, publisher = {ACM Press}, title = {{HapFACS 1.0: Software/API for Generating FACS-Based Facial Expressions}}, url = {http://ascl.cis.fiu.edu/uploads/1/3/4/2/13423859/amini-faa-2012.pdf}, year = {2012} } @article{Kennedy2012, abstract = {Information technology can help individuals to change their health behaviors. This is due to its potential for dynamic and unbiased information processing enabling users to monitor their own progress and be informed about risks and opportunities specific to evolving contexts and motivations. 
However, in many behavior change interventions, information technology is underused by treating it as a passive medium focused on efficient transmission of information and a positive user experience.}, author = {Kennedy, Catriona M and Powell, John and Payne, Thomas H and Ainsworth, John and Boyd, Alan and Buchan, Iain}, doi = {10.2196/jmir.1893}, journal = {Journal of Medical Internet Research}, number = {3}, title = {{Active Assistance Technology for Health-Related Behavior Change: An Interdisciplinary Review}}, url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC3415065/}, volume = {14}, year = {2012} } @article{Hess1960, abstract = {Increases in the size of the pupil of the eye have been found to accompany the viewing of emotionally toned or interesting visual stimuli. A technique for recording such changes has been developed, and preliminary results with cats and human beings are reported with attention being given to differences between the sexes in response to particular types of material.}, author = {Hess, E H and Polt, J M}, journal = {Science}, keywords = {pupil,sex characteristics,sympathetic nervous system}, number = {3423}, pages = {349--350}, pmid = {14401489}, title = {{Pupil size as related to interest value of visual stimuli.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/14401489}, volume = {132}, year = {1960} } @article{Allwood2002, author = {Allwood, Jens}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Allwood - 2002 - Bodily communication dimensions of expression and content.pdf:pdf}, journal = {Multimodality in language and speech systems}, number = {26}, pages = {1--15}, publisher = {Kluwer Academic Publishers}, title = {{Bodily communication dimensions of expression and content}}, url = {http://books.google.com/books?hl=en\&lr=\&id=EhvKccHyp-YC\&oi=fnd\&pg=PA7\&dq=BODILY+COMMUNICATION+DIMENSIONS+OF+EXPRESSION+AND+CONTENT\&ots=VJi0CsBpSv\&sig=hKcLiJG5dnvRXrrCAzlwTC3Le8M}, volume = {7}, year =
{2002} } @book{Sutcliffe2010, author = {Sutcliffe, Alistair}, editor = {Carroll, John M. and Frymoyer, Edward M.}, pages = {3}, publisher = {Synthesis Lectures on Human-Centered Informatics}, title = {{Designing for User Engagement: Aesthetic and Attractive User Interfaces}}, year = {2010} } @book{Dimeff1999, abstract = {(from the cover) This manual presents a pragmatic and clinically proven approach to the prevention and treatment of undergraduate alcohol abuse. The Brief Alcohol Screening and Intervention for College Students (BASICS) model is a nonconfrontational, harm reduction approach that helps students reduce their alcohol consumption and decrease the behavioral and health risks associated with heavy drinking. Including reproducible handouts and assessment forms, the book takes readers step-by-step through conducting BASICS assessment and feedback sessions. Special topics covered include the use of Diagnostic and Statistical Manual of Mental Disorders-IV (DSM-IV) criteria to evaluate alcohol abuse, ways to counter defensiveness about drinking and how to help students who continue to drink in a hazardous fashion. (PsycINFO Database Record (c) 2010 APA, all rights reserved) (cover)}, author = {Dimeff, Linda A and Baer, John S and Kivlahan, Daniel R and Marlatt, G Alan}, booktitle = {The Journal of Psychiatry Law}, isbn = {1572303921}, pages = {1929--1945}, publisher = {Guilford Press}, title = {{Brief alcohol screening and intervention for college students (BASICS): A harm reduction approach}}, url = {http://search.ebscohost.com/login.aspx?direct=true\&db=psyh\&AN=1999-02125-000\&lang=fr\&site=ehost-live}, volume = {30}, year = {1999} } @inproceedings{Liu2010a, abstract = {Emotions accompany everyone in the daily life, playing a key role in non-verbal communication, and they are essential to the understanding of human behavior. Emotion recognition could be done from the text, speech, facial expression or gesture.
In this paper, we concentrate on recognition of “inner” emotions from electroencephalogram (EEG) signals as humans could control their facial expressions or vocal intonation. The need and importance of the automatic emotion recognition from EEG signals has grown with increasing role of brain computer interface applications and development of new forms of human-centric and human-driven interaction with digital media. We propose fractal dimension based algorithm of quantification of basic emotions and describe its implementation as a feedback in 3D virtual environments. The user emotions are recognized and visualized in real time on his/her avatar adding one more so-called “emotion dimension” to human computer interfaces.}, address = {Singapore}, author = {Liu, Yisi and Sourina, Olga and Nguyen, Minh Khoa}, booktitle = {International Conference on Cyberworlds (CW)}, doi = {10.1109/CW.2010.37}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu, Sourina, Nguyen - 2010 - Real-Time EEG-Based Human Emotion Recognition and Visualization(2).pdf:pdf}, isbn = {978-1-4244-8301-3}, keywords = {- emotion recognition,bci,eeg,emotion visualization,fractal dimension,hci}, month = oct, pages = {262--269}, publisher = {IEEE Computer Society}, title = {{Real-Time EEG-Based Human Emotion Recognition and Visualization}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5656346}, year = {2010} } @book{Turkle1995, address = {New York}, author = {Turkle, S.}, publisher = {Simon \& Schuster}, title = {{Life on the Screen}}, year = {1995} } @article{Kleinsmith2013, author = {Kleinsmith, Andrea and Bianchi-Berthouze, Nadia}, doi = {10.1109/T-AFFC.2012.16}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kleinsmith, Bianchi-Berthouze - 2013 - Affective Body Expression Perception and Recognition A Survey.pdf:pdf}, issn = {1949-3045}, journal = {IEEE Transactions on Affective 
Computing}, month = jan, number = {1}, pages = {15--33}, title = {{Affective Body Expression Perception and Recognition: A Survey}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6212434}, volume = {4}, year = {2013} } @article{Matthews1993, abstract = {Healers must try to understand what the illness means to the patient and create a therapeutic sense of connection in the patient-clinician relationship. A favorable climate for "connexional" experiences can be created through the use of various interviewing techniques. Attending to rapport, silencing internal talk, accessing unconscious processes, and communicating understanding can help clinicians enhance their sensitivity to the subtle clues on which issues of meaning and connection often depend. Several risks are associated with the establishment of closer patient-clinician relationships, including dependence and power issues, sexual attraction, and deeper exposure of the clinician to the patient's pain. Prepared with an awareness of these risks and techniques to address them, clinicians are encouraged to deepen their level of dialogue with patients, to compare their experiences with those of other clinicians, and to thereby develop a more systematic understanding of therapeutic relationships.}, author = {Matthews, D A and Suchman, A L and Branch, W T}, institution = {National Center for Chronic Fatigue, Arlington, Virginia.}, journal = {Annals of Internal Medicine}, keywords = {professional patient relationship}, number = {12}, pages = {973--977}, pmid = {8489112}, title = {{Making "connexions": enhancing the therapeutic potential of patient-clinician relationships.}}, volume = {118}, year = {1993} } @incollection{Feshbach1987, abstract = {Explore the theoretical relationship between parental empathy, particularly maternal empathy, and child adjustment and to report preliminary findings from two recent research projects (PsycINFO Database Record (c) 2009 APA, all rights reserved)}, author 
= {Feshbach, Norma Deitch}, booktitle = {Empathy and its development}, editor = {Eisenberg, N and Strayer, J}, isbn = {0521326095}, pages = {271--291}, publisher = {New York, NY, US: Cambridge University Press}, series = {Cambridge studies in social and emotional development.}, title = {{Parental empathy and child adjustment/maladjustment.}}, year = {1987} } @inproceedings{Nakano2010, abstract = {In face-to-face conversations, speakers are continuously checking whether the listener is engaged in the conversation and change the conversational strategy if the listener is not fully engaged in the conversation. With the goal of building a conversational agent that can adaptively control conversations with the user, this study analyzes the user’s gaze behaviors and proposes a method for estimating whether the user is engaged in the conversation based on gaze transition 3-gram patterns. First, we conduct a Wizard- of-Oz experiment to collect Based on the analysis of the gaze data, we propose an engagement estimation method that the user’s gaze behaviors. detects the user’s disengagement gaze patterns. The algorithm is implemented as a real-time engagement-judgment mechanism and is incorporated into a multimodal dialogue manager in a conversational agent. The agent estimates the user’s conversational engagement and generates probing questions when the user is distracted from the conversation. Finally, we conduct an evaluation experiment using the proposed engagement-sensitive agent and demonstrate that engagement the estimation function improves the user’s impression of the agent and the interaction with the agent. 
In addition, probing performed with proper timing was also found to have a positive effect on user’s verbal/nonverbal behaviors in communication with the conversational agent.}, address = {Hong Kong, China}, author = {Nakano, Yukiko I. and Ishii, Ryo}, booktitle = {IUI '10 Proceedings of the 15th international conference on Intelligent user interfaces}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nakano, Ishii - 2010 - Estimating User ’ s Engagement from Eye-gaze Behaviors in Human-Agent Conversations.pdf:pdf}, isbn = {9781605585154}, keywords = {conversational agent,conversational engagement,dialogue management.,eye-gaze}, pages = {139--148}, publisher = {ACM}, title = {{Estimating User's Engagement from Eye-gaze Behaviors in Human-Agent Conversations}}, year = {2010} } @article{Mandryk2006, abstract = {Emerging technologies offer exciting new ways of using entertainment technology to create fantastic play experiences and foster interactions between players. Evaluating entertainment technology is challenging because success isn’t defined in terms of productivity and performance, but in terms of enjoyment and interaction. Current subjective methods of evaluating entertainment technology aren’t sufficiently robust. This paper describes two experiments designed to test the efficacy of physiological measures as evaluators of user experience with entertainment technologies. We found evidence that there is a different physiological response in the body when playing against a computer versus playing against a friend. These physiological results are mirrored in the subjective reports provided by the participants. In addition, we provide guidelines for collecting physiological data for user experience analysis, which were informed by our empirical investigations.
This research provides an initial step towards using physiological responses to objectively evaluate a user’s experience with entertainment technology.}, author = {Mandryk, Regan L. and Inkpen, Kori M. and Calvert, Thomas W.}, doi = {10.1080/01449290500331156}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mandryk, Inkpen, Calvert - 2006 - Using psychophysiological techniques to measure user experience with entertainment technologies.pdf:pdf}, journal = {Behaviour \& Information Technology}, number = {2}, pages = {141--158}, title = {{Using psychophysiological techniques to measure user experience with entertainment technologies}}, url = {http://www.tandfonline.com/doi/abs/10.1080/01449290500331156}, volume = {25}, year = {2006} } @article{Friesen1983, author = {Friesen, Wallace V and Ekman, Paul}, journal = {Unpublished manuscript, University of California at San Francisco}, publisher = {University of California}, title = {{EMFACS-7: Emotional Facial Action Coding System}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:EMFACS-7:+Emotional+Facial+Action+Coding+System\#0}, year = {1983} } @book{Hoffman2000, author = {Hoffman, Martin L.}, booktitle = {Development}, isbn = {052158034X}, pages = {2}, publisher = {Cambridge University Press}, title = {{Empathy and Moral Development: Implications for Caring and Justice}}, year = {2000} } @inproceedings{Hartmann2005, abstract = {To increase the believability and life-likeness of Embodied Conversational Agents (ECAs), we introduce a behavior syn- thesis technique for the generation of expressive gesturing. A small set of dimensions of expressivity is used to char- acterize individual variability of movement. We empirically evaluate our implementation in two separate user studies. The results suggest that our approach works well for a sub- set of expressive behavior. However, animation fidelity is not high enough to realize subtle changes. 
Interaction effects between different parameters need to be studied further.}, address = {Utrecht, Netherlands}, author = {Hartmann, B and Mancini, M and Buisine, S. and Pelachaud, Catherine}, booktitle = {4th International Joint Conference on Autonomous Agents and Multi Agent Systems (AAMAS'05)}, doi = {10.1145/1082473.1082640}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hartmann et al. - 2005 - Design and evaluation of expressive gesture synthesis for embodied conversational agents.pdf:pdf}, isbn = {1595930949}, keywords = {embodied conversational agents}, pages = {1095--1096}, publisher = {ACM}, title = {{Design and evaluation of expressive gesture synthesis for embodied conversational agents}}, url = {http://dl.acm.org/citation.cfm?id=1082640}, year = {2005} } @article{Harrigan1983, author = {Harrigan, J. A. and Rosenthal, Robert}, journal = {Journal of Applied Social Psychology}, pages = {496--509}, title = {{Physicians’ head and body position as determinants of perceived rapport}}, volume = {13}, year = {1983} } @article{Pereira2011, abstract = {For robots to become our personal companions in the future, they need to know how to socially interact with us. One defining charac- teristic of human social behaviour is empathy. In this paper, we present a robot that acts as a social companion expressing different kinds of empathic behaviours through its facial expressions and utterances. The robot comments the moves of two subjects playing a chess game against each other, being empathic to one of them and neutral towards the other. The results of a pilot study suggest that users to whom the robot was empathic perceived the robot more as a friend.}, author = {Pereira, A. and Leite, Iolanda and Mascarenhas, Samuel and Martinho, Carlos and Paiva, Ana}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pereira et al. 
- 2011 - Using empathy to improve human-robot relationships.pdf:pdf}, journal = {Human-Robot Personal Relationships}, keywords = {companionship,empathy,human-robot interaction}, pages = {130--138}, publisher = {Springer}, title = {{Using empathy to improve human-robot relationships}}, url = {http://www.springerlink.com/index/R468X62581620V62.pdf}, volume = {LNICST 59}, year = {2011} } @article{MarketingNPV2008, author = {{MarketingNPV}}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/MarketingNPV - 2008 - HowDo YouMeasure Engagement Start byDefining It in the Right Context.pdf:pdf}, journal = {MarketingNPV Journal}, number = {1}, pages = {3--7}, title = {{How Do You Measure Engagement? Start by Defining It in the Right Context}}, volume = {5}, year = {2008} } @inproceedings{Bartneck2008, abstract = {This study emphasizes the need for standardized measurement tools for human robot interaction (HRI). If we are to make progress in this field then we must be able to compare the results from different studies. A literature review has been performed on the measurements of five key concepts in HRI: anthropomorphism, animacy, likeability, perceived intelligence, and perceived safety. The results have been distilled into five consistent questionnaires using semantic differential scales. We report reliability and validity indicators based on several empirical studies that used these questionnaires. It is our hope that these questionnaires can be used by robot developers to monitor their progress.
Psychologists are invited to further develop the questionnaires by adding new concepts, and to conduct further validations where it appears necessary.}, address = {Amsterdam}, author = {Bartneck, Christoph and Kulic, Dana and Croft, Elizabeth}, booktitle = {Proceedings of the Metrics for Human-Robot Interaction Workshop in affiliation with the 3rd ACM/IEEE International Conference on Human-Robot Interaction (HRI 2008), Technical Report 471}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bartneck, Kulic, Croft - 2008 - Measuring the anthropomorphism, animacy, likeability, perceived intelligence and perceived safety of rob.pdf:pdf}, keywords = {Human factors,measurement,perception,robot}, pages = {37--44}, publisher = {University of Hertfordshire}, title = {{Measuring the anthropomorphism, animacy, likeability, perceived intelligence and perceived safety of robots}}, url = {http://ece.uwaterloo.ca/~dkulic/pubs/bartneckKulicCroft.pdf}, volume = {471}, year = {2008} } @article{Rameson2009, author = {Rameson, Lian T. and Lieberman, Matthew D.}, doi = {10.1111/j.1751-9004.2008.00154.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rameson, Lieberman - 2009 - Empathy A Social Cognitive Neuroscience Approach.pdf:pdf}, issn = {17519004}, journal = {Social and Personality Psychology Compass}, month = jan, number = {1}, pages = {94--110}, title = {{Empathy: A Social Cognitive Neuroscience Approach}}, url = {http://doi.wiley.com/10.1111/j.1751-9004.2008.00154.x}, volume = {3}, year = {2009} } @article{Bryant2008, author = {Bryant, Gregory a. and Barrett, H. 
Clark}, doi = {10.1163/156770908X289242}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bryant, Barrett - 2008 - Vocal Emotion Recognition Across Disparate Cultures.pdf:pdf}, issn = {15677095}, journal = {Journal of Cognition and Culture}, keywords = {and can include bodily,com-,cross-cultural comparisons,deployed physical displays with,emotional expressions are strategically,evolutionary psychology,facial movements and,gestures,municative function,speech,universals,vocal emotion}, month = apr, number = {1}, pages = {135--148}, title = {{Vocal Emotion Recognition Across Disparate Cultures}}, url = {http://openurl.ingenta.com/content/xref?genre=article\&issn=1567-7095\&volume=8\&issue=1\&spage=135}, volume = {8}, year = {2008} } @book{Doherty1998, author = {Doherty, William Joseph and Campbell, Thomas}, pages = {159}, publisher = {Sage Publications}, title = {{Families and Health}}, year = {1998} } @article{Dapretto2006, abstract = {To examine mirror neuron abnormalities in autism, high-functioning children with autism and matched controls underwent fMRI while imitating and observing emotional expressions. Although both groups performed the tasks equally well, children with autism showed no mirror neuron activity in the inferior frontal gyrus (pars opercularis). Notably, activity in this area was inversely related to symptom severity in the social domain, suggesting that a dysfunctional 'mirror neuron system' may underlie the social deficits observed in autism.}, author = {Dapretto, Mirella and Davies, Mari S and Pfeifer, Jennifer H and Scott, Ashley A. and Sigman, Marian and Bookheimer, Susan Y and Iacoboni, Marco}, doi = {10.1038/nn1611}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dapretto et al. 
- 2006 - Understanding emotions in others mirror neuron dysfunction in children with autism spectrum disorders.pdf:pdf}, issn = {1097-6256}, journal = {Nature neuroscience}, keywords = {Autistic Disorder,Autistic Disorder: physiopathology,Autistic Disorder: psychology,Brain Mapping,Child,Emotions,Emotions: physiology,Empathy,Facial Expression,Female,Humans,Magnetic Resonance Imaging,Male,Neurons,Neurons: physiology,Social Perception}, month = jan, number = {1}, pages = {28--30}, pmid = {16327784}, title = {{Understanding emotions in others: mirror neuron dysfunction in children with autism spectrum disorders.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16327784}, volume = {9}, year = {2006} } @phdthesis{Tatar1998, author = {Tatar, D.}, school = {Stanford University}, title = {{Social and Personal Effects of Preoccupied Listeners}}, type = {Thesis}, year = {1998} } @article{Cowell2005a, abstract = {For years, people have sought more natural means of communicating with their computers. Many have suggested that interaction with a computer should be as easy as interacting with other people, taking advantage of the multimodal nature of human communication. While users should, in theory, gravitate to such anthropomorphic embodiments, quite the contrary has been experienced; users generally have been dissatisfied and abandoned their use. This suggests a disconnect between factors that make human-human communication engaging and those used by designers to support human-agent interaction. This paper discusses a set of empirical studies that attempted to replicate human-human non-verbal behavior. The focus revolved around behaviors that portray a credible fa\c{c}ade, thereby helping embodied conversational agents (ECAs) to form a successful cooperative dyad with users. Based on a review of the non-verbal literature, a framework was created that identified trustworthy and credible non-verbal behaviors across five areas and formed design guidelines for character interaction. 
The design suggestions for those areas emanating from the facial region were experimentally supported but there was no concordant increase in perceived trust when bodily regions (posture, gesture) were added. In addition, in examining the importance of demographic elements in embodiment, it was found that users prefer to interact with characters that match their ethnicity and are young looking. There was no significant preference for gender. The implications of these results, as well as other interesting consequences are discussed.}, author = {Cowell, Andrew J. and Stanney, Kay M.}, doi = {10.1016/j.ijhcs.2004.11.008}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cowell, Stanney - 2005 - Manipulation of non-verbal interaction style and demographic embodiment to increase anthropomorphic computer(2).pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies}, month = feb, number = {2}, pages = {281--306}, title = {{Manipulation of non-verbal interaction style and demographic embodiment to increase anthropomorphic computer character credibility}}, url = {http://ocw.tudelft.nl/fileadmin/ocw/opener/Manipulation\_of\_non-verbal\_interaction\_style\_and\_demographic\_embodiment\_to\_increase\_anthropomorphic\_computer\_character\_credibility.pdf}, volume = {62}, year = {2005} } @inproceedings{Huang2011, abstract = {Rapport, the feeling of being "in sync" with your conversational partners, is argued to underlie many desirable social effects. By generating proper verbal and nonverbal behaviors, virtual humans have been seen to create rapport during interactions with human users. In this paper, we introduce our approach to creating rapport following Tickle-Degnen and Rosenberg's threefactor (positivity, mutual attention and coordination) theory of rapport. 
By comparing with a previously published virtual agent, the Rapport Agent, we show that our virtual human predicts the timing of backchannel feedback and end-of-turn more precisely, performs more natural behaviors and, thereby creates much stronger feelings of rapport between users and virtual agents.}, address = {Reykjaavik, Iceland}, author = {Huang, Lixing and Morency, Louis-philippe and Gratch, Jonathan}, booktitle = {Proceedings of the 11th international conference on Intelligent virtual agents (IVA'11)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Huang, Morency, Gratch - 2011 - Virtual Rapport 2.0.pdf:pdf}, keywords = {coordination,mutual attention,positivity,rapport,virtual human}, pages = {68--79}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{Virtual Rapport 2.0}}, year = {2011} } @article{Cerekovic2010, author = {\v{C}erekovi\'{c}, Aleksandra and Pand\v{z}i\'{c}, Igor S.}, doi = {10.1007/s11042-010-0530-2}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/\v{C}erekovi\'{c}, Pand\v{z}i\'{c} - 2010 - Multimodal behavior realization for embodied conversational agents.pdf:pdf}, issn = {1380-7501}, journal = {Multimedia Tools and Applications}, keywords = {character animation system,multimodal behavior realization,virtual characters}, month = apr, number = {1}, pages = {143--164}, title = {{Multimodal behavior realization for embodied conversational agents}}, url = {http://link.springer.com/10.1007/s11042-010-0530-2}, volume = {54}, year = {2010} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @inproceedings{Panagiotis2009, abstract = {Emotion identification is beginning to be considered as an essential feature in human-computer interaction. However, most of the studies are mainly focused on facial expression classifications and speech recognition and not much attention has been paid until recently to physiological pattern recognition. In this paper, an integrative approach is proposed to emotional interaction by fusing multi-modal signals. Subjects are exposed to pictures selected from the International Affective Picture System (IAPS). A feature extraction procedure is used to discriminate between four affective states by means of a Mahalanobis distance classifier. The average classifications rate (74.11\%) was encouraging. Thus, the induced affective state is mirrored through an avatar by changing its facial characteristics and generating a voice message sympathising with the user’s mood. It is argued that multi-physiological patterning in combination with anthropomorphic avatars may contribute to the enhancement of affective multi-modal interfaces and the advancement of machine emotional intelligence.}, address = {San Diego, CA, USA}, author = {Panagiotis, D and Christos, A and Evdokimos, I and Manousos, A}, booktitle = {Ambient, Ubiquitous and Intelligent Interaction. 
13th International Conference on Human-Computer Interaction}, doi = {10.1007/978-3-642-02580-8}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Panagiotis et al. - 2009 - An integrated approach to emotion recognition for advanced emotional intelligence.pdf:pdf}, isbn = {9783642025808}, keywords = {Affective Computing,Avatar,EEG,Emotion,Mahalanobis,Skin Conductance,classifier}, number = {July}, pages = {565--574}, publisher = {Springer}, title = {{An integrated approach to emotion recognition for advanced emotional intelligence}}, year = {2009} } @article{Linden2003, abstract = {By comparing similar items rather than similar customers, item-to-item collaborative filtering scales to very large data sets and produces high-quality recommendations.}, author = {Linden, Greg and Smith, Brent and York, Jeremy}, doi = {10.1109/MIC.2003.1167344}, issn = {10897801}, journal = {IEEE Internet Computing}, number = {February}, pages = {76--80}, publisher = {IEEE Computer Society}, title = {{Amazon . com Recommendations: Item-to-Item Collaborative Filtering}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.98.1658\&rep=rep1\&type=pdf}, volume = {7}, year = {2003} } @article{White2001, abstract = {Online support groups are expanding as the general public becomes more comfortable using computer-mediated communication technology. These support groups have certain benefits for users who may not be able to or do not have the desire to attend face-to-face sessions. Online support groups also present challenges when compared to traditional face-to-face group communication. Communication difficulties may arise resulting from lack of visual and aural cues found in traditional face-to-face communication. Online support groups have emerged within health care as a result of the need individuals have to know more about health conditions they are confronting. 
The proliferation of these online communities may provide an opportunity for health educators to reach target populations with specific messages. This paper reviews the development of health-related online support groups, examines research conducted within these communities, compares their utility with traditional support groups and discusses the implications of these groups for health education.}, author = {White, M and Dorman, S M}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/White, Dorman - 2001 - Receiving social support online implications for health education.pdf:pdf}, issn = {0268-1153}, journal = {Health education research}, keywords = {Attitude to Computers,Computer Communication Networks,Disease,Disease: psychology,Health Education,Health Education: methods,Health Services Accessibility,Humans,Interpersonal Relations,Online Systems,Patient Participation,Patient Participation: psychology,Self-Help Groups,Social Support,United States}, month = dec, number = {6}, pages = {693--707}, pmid = {11780708}, title = {{Receiving social support online: implications for health education.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11780708}, volume = {16}, year = {2001} } @inproceedings{Fabri2007, abstract = {We present our work on emotionally expressive avatars, animated virtual characters that can express emotions via facial expressions. Because these avatars are highly distinctive and easily recognizable, they may be used in a range of applications. In the first part of the paper we present their use in computer mediated communication where two or more people meet in virtual space, each represented by an avatar. Study results suggest that social interaction behavior from the real-world is readily transferred to the virtual world. Empathy is identified as a key component for creating a more enjoyable experience and greater harmony between users. 
In the second part of the paper we discuss the use of avatars as an assistive, educational and therapeutic technology for people with autism. Based on the results of a preliminary study, we provide pointers regarding how people with autism may overcome some of the limitations that characterize their condition.}, address = {Beijing, China}, author = {Fabri, Marc and Elzouki, SYA}, booktitle = {Human-Computer Interaction, HCI Intelligent Multimodal Interaction Environments 12th International Conference}, doi = {10.1007/978-3-540-73110-8}, editor = {Jacko, Julie A.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fabri, Elzouki - 2007 - Emotionally expressive avatars for chatting, learning and therapeutic intervention.pdf:pdf}, keywords = {Emotion,autism,avatar,education,empathy,facial messaging,therapeutic intervention,virtual reality}, pages = {275--285}, publisher = {Springer Berlin / Heidelberg}, title = {{Emotionally expressive avatars for chatting, learning and therapeutic intervention}}, url = {http://dl.acm.org/citation.cfm?id=1769621 http://www.springerlink.com/content/7gju6n38605hp3h2/}, year = {2007} } @article{Dada2006, abstract = {Images: p1372-a:}, author = {Dada, Michael}, journal = {Journal of the National Medical Association}, number = {8}, pages = {1372}, publisher = {Motivate Healthy Habits}, title = {{Motivational Practice: Promoting Healthy Habits and Self-Care of Chronic Diseases}}, volume = {98}, year = {2006} } @article{Feller2003, abstract = {In this investigation of the construct of empathy, the authors report that the literature reflects strong evidence that empathy is an essential component of the therapeutic alliance across theories and that empathy is necessary in the counseling process. 
The concept of empathy continues to be a central component of new forms of counseling and therapy.}, author = {Feller, C P and Cottone, R R}, journal = {Journal of Humanistic Counseling Education and Development}, number = {1}, pages = {53--62}, publisher = {American Counseling Association}, title = {{The Importance of Empathy in the Therapeutic Alliance.}}, volume = {42}, year = {2003} } @article{Heimgartner2011, author = {Heimg\"{a}rtner, R\"{u}diger and Tiede, L. W. and Windl, Helmut}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Heimg\"{a}rtner, Tiede, Windl - 2011 - Empathy as Key Factor for Successful Intercultural HCI Design.pdf:pdf}, journal = {Design, User Experience, and Usability. Theory, Methods, Tools and Practice}, keywords = {1 problems in hci,communication,cultural differences,culture,design caused by cultural,designing the functionality and,differences,empathy,intercultural communication,intercultural hci design,much cultural background has,to be considered when,understanding}, pages = {557--566}, publisher = {Springer}, title = {{Empathy as Key Factor for Successful Intercultural HCI Design}}, url = {http://www.springerlink.com/index/FG03081276H7K042.pdf}, year = {2011} } @incollection{Wallbott1995, address = {Cambridge}, author = {Wallbott, H G}, booktitle = {Mutualities in Dialogue}, chapter = {Congruence}, editor = {Markova, I and Graumann, C F and Foppa, K}, pages = {82--98}, publisher = {Cambridge University Press}, title = {{Congruence}}, year = {1995} } @article{Blairy1999, abstract = {Lipps (1907) presented a model of empathy which had an important influence on later formulations. According to Lipps, individuals tend to mimic an interaction partner's behavior, and this nonverbal mimicry induces—via a feedback process—the corresponding affective state in the observer. The resulting shared affect is believed to foster the understanding of the observed person's self. 
The present study tested this model in the context of judgments of emotional facial expressions. The results confirm that individuals mimic emotional facial expressions, and that the decoding of facial expressions is accompanied by shared affect. However, no evidence that emotion recognition accuracy or shared affect are mediated by mimicry was found. Yet, voluntary mimicry was found to have some limited influence on observer' s assessment of the observed person's personality. The implications of these results with regard to Lipps' original hypothesis are discussed.}, author = {Blairy, Sylvie and Herrera, Pedro and Hess, Ursula}, doi = {10.1023/A:1021370825283}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Blairy, Herrera, Hess - 1999 - Mimicry and the Judgment of Emotional Facial Expressions.pdf:pdf}, journal = {Journal of Nonverbal Behavior}, number = {1}, pages = {5--41}, title = {{Mimicry and the Judgment of Emotional Facial Expressions}}, url = {http://www.springerlink.com/content/unx02r46695w7651/ http://dx.doi.org/10.1023/A:1021370825283}, volume = {23}, year = {1999} } @article{Mattheij2013b, author = {Mattheij, R. and Postma-Nilsenov\'{a}, M. 
and Postma, E.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mattheij, Postma-Nilsenov\'{a}, Postma - 2013 - Mirror, Mirror in the Wall Is there mimicry in you all.pdf:pdf}, journal = {Journal of Ambient Intelligence and Smart Environments}, keywords = {embodied agent,facial expressions,imitation,mimicry,pitch,social signals}, pages = {1--5}, title = {{Mirror, Mirror in the Wall: Is there mimicry in you all?}}, url = {https://www2.csulb.edu/divisions/students/hrc/HRC\_Journal/documents/Journal\_Vol6No2.pdf\#page=3}, volume = {1}, year = {2013} } @article{Neiberg2006, author = {Neiberg, Daniel and Elenius, Kjell and Karlsson, Inger}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Neiberg, Elenius, Karlsson - 2006 - Emotion recognition in spontaneous speech.pdf:pdf}, journal = {Working Papers of Lund University, Centre for Languages \& Literature, Dept. of Linguistics \& Phonetics}, pages = {101--104}, title = {{Emotion recognition in spontaneous speech}}, url = {http://nile.lub.lu.se/ojs/index.php/LWPL/article/viewFile/2306/1881}, volume = {52}, year = {2006} } @inproceedings{Jiang2007, author = {Jiang, Hong and Vidal, J.M. and Huhns, M.N.}, booktitle = {Proceedings of the 6th international joint conference on Autonomous agents and multiagent systems}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jiang, Vidal, Huhns - 2007 - EBDI an architecture for emotional agents.pdf:pdf}, keywords = {agent architecture,belief-desire-intention,emotional agent}, pages = {11}, publisher = {ACM}, title = {{EBDI: an architecture for emotional agents}}, url = {http://dl.acm.org/citation.cfm?id=1329139}, year = {2007} } 
@inproceedings{Wang2010, abstract = {Communication is more effective and persuasive when par- ticipants establish rapport. Tickle-Degnen and Rosenthal [57] argue rapport arises when participants exhibit mutual attentiveness, positivity and coordination. In this paper, we investigate how these factors relate to perceptions of rap- port when users interact via avatars in virtual worlds. In this study, participants told a story to what they believed was the avatar of another participant. In fact, the avatar was a computer program that systematically manipulated levels of attentiveness, positivity and coordination. In contrast to Tickel-Degnen and Rosenthal’s findings, its impact in a wide range of interpersonal domains includ- ing social engagement [52], classroom learning [22], suc- cess in negotiations [20], improving worker compliance [18], psychotherapeutic effectiveness [59], and improved quality of child care [11]. Recent research in virtual envi- ronments has demonstrated the possibility of translating these findings into computer-mediated (CMC) and human- computer interactions (HCI) where embodied communi- cated behaviors can not only be reproduced but altered in novel ways to perhaps amplify their interpersonal conse- quences [26] [5]. 
high-levels of mutual attentiveness alone can dramatically lower percep- tions of rapport in avatar communication. Indeed, an agent that attempted to maximize mutual attention performed as poorly as an agent that was designed to convey boredom. Adding positivity and coordination to mutual attentiveness, on the other hand, greatly improved rapport. This work un- veils the dependencies between components of rapport and informs the design of agents and avatars in computer medi- ated communication.}, address = {Atlanta, GA, USA}, author = {Wang, Ning and Gratch, Jonathan}, booktitle = {28th ACM Conference on Human Factors in Computing Systems (CHI'10)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wang, Gratch - 2010 - Don't just stare at me!.pdf:pdf}, keywords = {Virtual human,back-channel,gaze,head nod,pos- ture mirroring.,rapport}, pages = {1241--1249}, publisher = {ACM}, title = {{Don't just stare at me!}}, year = {2010} } @article{Ortony1990, abstract = {A widespread assumption in theories of emotion is that there exists a small set of basic emotions. From a biological perspective, this idea is manifested in the belief that there might be neurophysiological and anatomical substrates corresponding to the basic emotions. From a psychological perspective, basic emotions are often held to be the primitive building blocks of other, nonbasic emotions. The content of such claims is examined, and the results suggest that there is no coherent nontrivial notion of basic emotions as the elementary psychological primitives in terms of which other emotions can be explained. Thus, the view that there exist basic emotions out of which all other emotions are built, and in terms of which they can be explained, is questioned, raising the possibility that this position is an article of faith rather than an empirically or theoretically defensible basis for the conduct of emotion research. 
This suggests that perhaps the notion of basic emotions will not lead to significant progress in the field. An alternative approach to explaining the phenomena that appear to motivate the postulation of basic emotions is presented.}, author = {Ortony, A and Turner, T J}, institution = {Institute for the Learning Sciences, Northwestern University, Evanston, Illinois 60201.}, journal = {Psychological Review}, number = {3}, pages = {315--331}, pmid = {1669960}, publisher = {Citeseer}, title = {{What's basic about basic emotions?}}, url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0033-295X.97.3.315}, volume = {97}, year = {1990} } @inproceedings{Polajnar2011, author = {Polajnar, Jernej and Dalvandi, B. and Polajnar, D.}, booktitle = {Cognitive Informatics \& Cognitive Computing (ICCI'CC'11), 2011 10th IEEE International Conference on}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Polajnar, Dalvandi, Polajnar - 2011 - Does empathy between artificial agents improve agent teamwork.pdf:pdf}, isbn = {9781457716973}, pages = {96--102}, publisher = {IEEE}, title = {{Does empathy between artificial agents improve agent teamwork?}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6016126}, year = {2011} } @inproceedings{Breitfuss2007, address = {New York, New York, USA}, author = {Breitfuss, Werner and Prendinger, Helmut and Ishizuka, Mitsuru}, booktitle = {Proceedings of the ninth international conference on Multimodal interfaces - ICMI '07}, doi = {10.1145/1322192.1322247}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Breitfuss, Prendinger, Ishizuka - 2007 - Automated generation of non-verbal behavior for virtual embodied characters.pdf:pdf}, isbn = {9781595938176}, keywords = {interfaces,multi-modal presentation,multimodal input and output}, pages = {319--322}, publisher = {ACM Press}, title = {{Automated generation of non-verbal behavior for virtual embodied 
characters}}, url = {http://portal.acm.org/citation.cfm?doid=1322192.1322247}, year = {2007} } @article{Matsumoto2004, author = {Matsumoto, D. and Ekman, Paul}, journal = {Journal of Personality and Social Psychology}, number = {4}, pages = {529--540}, title = {{The relationship among expressions, labels, and descriptions of contempt}}, volume = {87}, year = {2004} } @incollection{Trivers1972, address = {Chicago, IL}, author = {Trivers, R. L.}, booktitle = {Sexual selection and the descent of man}, editor = {Campbell, B.}, pages = {136--179}, publisher = {Aldine}, title = {{Sexual selection and the descent of man}}, year = {1972} } @incollection{Chartrand2005, author = {Chartrand, T. L. and Maddux, W W and Lakin, J. L.}, booktitle = {The new unconscious}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chartrand, Maddux, Lakin - 2005 - Beyond the perception-behavior link The ubiquitous utility and motivational moderators of nonconscious.pdf:pdf}, pages = {334--361}, publisher = {Oxford University Press New York}, title = {{Beyond the perception-behavior link: The ubiquitous utility and motivational moderators of nonconscious mimicry}}, year = {2005} } @article{Mao2009, abstract = {Neste trabalho \'{e} proposto um ambiente de aprendizado online inteligente em conjunto com um tutor afetivo. O tutor chama-se "Alice" e \'{e} capaz de reconhecer o estado afetivo dos estudantes por meio de express\~{o}es faciais, fala e texto, bem como se adaptar a ele. 
Segundo seus autores, o sistema \'{e} capaz de aumentar a produtividade dos estudantes por meio do uso de express\~{o}es faciais e de fala sint\'{e}tica emocional.}, author = {Mao, Xia and Li, Zheng}, doi = {10.1145/1520340.1520572}, isbn = {9781605582474}, journal = {Science}, keywords = {acm classification keywords,affective computing,intelligent e learning system,multimodal interaction,perceptive interfaces,virtual agent}, pages = {3787--3792}, publisher = {ACM Press}, series = {CHI EA '09}, title = {{Implementing emotion-based user-aware e-learning}}, url = {http://portal.acm.org/citation.cfm?doid=1520340.1520572}, year = {2009} } @article{Littlewort2011, author = {Littlewort, Gwen C and Whitehill, Jacob and Wu, T}, doi = {10.1109/AFGR.2008.4813406}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Littlewort, Whitehill, Wu - 2011 - The computer expression recognition toolbox (CERT).pdf:pdf}, isbn = {978-1-4244-2153-4}, journal = {Recognition and}, month = sep, pages = {1--2}, publisher = {Ieee}, title = {{The computer expression recognition toolbox (CERT)}}, year = {2011} } @article{Kissler2008, abstract = {We investigated the effect of emotional target content on the generation of pro- and anti-saccades. Subjects had to generate saccades towards (pro-saccade) or away from (anti-saccade) peripherally presented pleasant, unpleasant or neutral pictures. Two different SOAs were used, either with simultaneous fixation offset and target onset (no gap) or with fixation offset preceding target onset by 200 ms (gap). In the pro-saccade task participants were faster to respond to emotional pictures in the left visual field. In the right visual field facilitation occurred only for pleasant pictures and saccadic reaction times towards unpleasant pictures were slowed. In the anti-saccade task more anti-saccade errors towards emotional pictures (pleasant and unpleasant) were made in the gap condition. 
On the whole, endogenous saccade generation appears facilitated by emotional target content, probably via increased input from extra-striate and parietal brain areas to the superior colliculus. Moderating factors such as the SOA or the visual field of presentation are discussed.}, author = {Kissler, Johanna and Keil, Andreas}, doi = {10.1007/s00221-008-1358-0}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kissler, Keil - 2008 - Look-don't look! How emotional pictures affect pro- and anti-saccades.pdf:pdf}, issn = {1432-1106}, journal = {Experimental brain research. Experimentelle Hirnforschung. Exp\'{e}rimentation c\'{e}r\'{e}brale}, keywords = {Adult,Affect,Affect: physiology,Arousal,Arousal: physiology,Attention,Attention: physiology,Emotions,Emotions: physiology,Female,Fixation, Ocular,Fixation, Ocular: physiology,Functional Laterality,Functional Laterality: physiology,Humans,Male,Neuropsychological Tests,Orientation,Orientation: physiology,Parietal Lobe,Parietal Lobe: anatomy \& histology,Parietal Lobe: physiology,Pattern Recognition, Visual,Pattern Recognition, Visual: physiology,Photic Stimulation,Reaction Time,Reaction Time: physiology,Saccades,Saccades: physiology,Signal Processing, Computer-Assisted,Superior Colliculi,Superior Colliculi: anatomy \& histology,Superior Colliculi: physiology,Visual Cortex,Visual Cortex: anatomy \& histology,Visual Cortex: physiology,Visual Fields,Visual Fields: physiology,Visual Pathways,Visual Pathways: anatomy \& histology,Visual Pathways: physiology}, month = jun, number = {2}, pages = {215--22}, pmid = {18368396}, title = {{Look-don't look! 
How emotional pictures affect pro- and anti-saccades.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18368396}, volume = {188}, year = {2008} } @inproceedings{Thiebaux2008, abstract = {Researchers demand much from their embodied conversational agents (ECAs), requiring them to be both life-like, as well as responsive to events in an interactive setting. We find that a flexible combination of animation approaches may be needed to satisfy these needs. In this paper we present SmartBody, an open source modular framework for animating ECAs in real time, based on the notion of hierarchically connected animation controllers. Controllers in SmartBody can employ arbitrary animation algorithms such as keyframe interpolation, motion capture or procedural animation. Controllers can also schedule or combine other controllers. We discuss our architecture in detail, including how we incorporate traditional approaches, and develop the notion of a controller as a reactive module within a generic framework, for realizing modular animation control. To illustrate the versatility of the architecture, we also discuss a range of applications that have used SmartBody successfully.}, address = {Estoril, Portugal}, author = {Thiebaux, Marcus and Marshall, Andrew N. and Marsella, Stacy and Kallmann, Marcelo}, booktitle = {Proceedings of the 7th International Conference on Autonomous Agents and Multiagent Systems (AAMAS 2008)}, editor = {Padgham, Lin and Parkes, David C. and M{\"u}ller, J{\"o}rg and Parsons, Simon}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Thiebaux et al. 
- 2008 - SmartBody Behavior Realization for Embodied Conversational Agents.pdf:pdf}, keywords = {character an-,conversational characters,virtual humans}, number = {Aamas}, pages = {12--16}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{SmartBody : Behavior Realization for Embodied Conversational Agents}}, year = {2008} } @techreport{Noh1998, author = {Noh, Jun-yong and Neumann, Ulrich}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Noh, Neumann - 1998 - A survey of facial modeling and animation techniques.pdf:pdf}, institution = {Integrated Media Systems Center, University of Southern California}, pages = {1--26}, title = {{A survey of facial modeling and animation techniques}}, url = {http://pdf.aminer.org/000/318/890/synthesizing\_lateral\_face\_from\_frontal\_facial\_image\_using\_human\_anthropometric.pdf}, year = {1998} } @article{Stockwell1983, author = {Stockwell, T and Murphy, D and Hodgson, R}, journal = {British journal of addiction}, number = {2}, pages = {145--155}, pmid = {6135435}, title = {{The severity of alcohol dependence questionnaire: its use, reliability and validity.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6135435}, volume = {78}, year = {1983} } @inproceedings{Cairco2007, address = {Newport Beach, California, USA}, author = {Cairco, Lauren and Babu, Sabarish and Ulinski, Amy and Zanbaka, Catherine and Hodges, Larry F.}, booktitle = {Proceedings of the 2007 ACM symposium on Virtual reality software and technology (VRST '07)}, doi = {10.1145/1315184.1315239}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cairco et al. 
- 2007 - Shakespearean karaoke.pdf:pdf}, isbn = {9781595938633}, pages = {239--240}, publisher = {ACM Press}, title = {{Shakespearean karaoke}}, url = {http://portal.acm.org/citation.cfm?doid=1315184.1315239}, year = {2007} } @article{Cohn2004, abstract = {The diaries of 1,084 U.S. users of an on-line journaling service were downloaded for a period of 4 months spanning the 2 months prior to and after the September 11 attacks. Linguistic analyses of the journal entries revealed pronounced psychological changes in response to the attacks. In the short term, participants expressed more negative emotions, were more cognitively and socially engaged, and wrote with greater psychological distance. After 2 weeks, their moods and social referencing returned to baseline, and their use of cognitive-analytic words dropped below baseline. Over the next 6 weeks, social referencing decreased, and psychological distancing remained elevated relative to baseline. Although the effects were generally stronger for individuals highly preoccupied with September 11, even participants who hardly wrote about the events showed comparable language changes. 
This study bypasses many of the methodological obstacles of trauma research and provides a fine-grained analysis of the time line of human coping with upheaval.}, author = {Cohn, Michael A and Mehl, Matthias R and Pennebaker, James W}, institution = {University of Michigan, USA.}, journal = {Psychological Science}, keywords = {adaptation,adult,affect,cognition,female,humans,linguistics,male,psychological,terrorism}, number = {10}, pages = {687--693}, pmid = {15447640}, publisher = {SAGE Publications}, title = {{Linguistic markers of psychological change surrounding September 11, 2001.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15447640}, volume = {15}, year = {2004} } @article{Peter2003, author = {Sonnby-Borgstr{\"o}m, Marianne and Jonsson, Peter and Svensson, Owe}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sonnby-borgstr\"{o}m, Jonsson, Svensson - 2003 - Emotional empathy as related to mimicry reactions at different levels of information proces.pdf:pdf}, journal = {Journal of Nonverbal Behavior}, keywords = {EMG,emotional contagion,empathy,facial expressions,facial mimicry,mirror neurons}, number = {1}, pages = {3--23}, title = {{Emotional empathy as related to mimicry reactions at different levels of information processing}}, url = {http://www.springerlink.com/index/P81X69QTH751V836.pdf}, volume = {27}, year = {2003} } @article{Boukricha2011, abstract = {Allowing virtual humans to align to others’ perceived emotions is believed to enhance their cooperative and communicative social skills. In our work, emotional alignment is realized by endowing a virtual human with the ability to empathize. Recent research shows that humans empathize with each other to different degrees depending on several factors including, among others, their mood, their personality, and their social relationships. 
Although providing virtual humans with features like affect, personality, and the ability to build social relationships, little attention has been devoted to the role of such features as factors modulating their empathic behavior. Supported by psychological models of empathy, we propose an approach to model empathy for the virtual human EMMA—an Empathic MultiModal Agent—consisting of three processing steps: First, the Empathy Mechanism by which an empathic emotion is produced. Second, the Empathy Modulation by which the empathic emotion is modulated. Third, the Expression of Empathy by which EMMA’s multiple modalities are triggered through the modulated empathic emotion. The proposed model of empathy is illustrated in a conversational agent scenario involving the virtual humans MAX and EMMA.}, author = {Boukricha, Hana and Wachsmuth, Ipke}, doi = {10.1007/s13218-011-0109-8}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Boukricha, Wachsmuth - 2011 - Empathy-Based Emotional Alignment for a Virtual Human A Three-Step Approach.pdf:pdf}, issn = {0933-1875}, journal = {KI - K\"{u}nstliche Intelligenz}, keywords = {agent-agent interaction,empathic virtual humans,human-agent,internal simulation}, month = may, number = {3}, pages = {195--204}, title = {{Empathy-Based Emotional Alignment for a Virtual Human: A Three-Step Approach}}, url = {http://www.springerlink.com/content/9322738p4101p94w/}, volume = {25}, year = {2011} } @techreport{Bradley1999, author = {Bradley, Margaret M and Lang, PJ}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bradley, Lang - 1999 - Affective norms for English words (ANEW) Instruction manual and affective ratings.pdf:pdf}, institution = {Technical Report C-1, The Center for Research in Psychophysiology, University of Florida}, title = {{Affective norms for English words (ANEW): Instruction manual and affective ratings}}, url = 
{http://www.uvm.edu/~pdodds/research/papers/others/1999/bradley1999a.pdf}, year = {1999} } @inproceedings{Lisetti2008, abstract = {In this article, we explore how Embodied Conversational Agents (ECAs) or avatars could be used as social orthotics defined as therapeutic computer- based social companions aimed at promoting healthy behaviors. We review some of the latest related progress and identify specific features of ECAs that are important – if not necessary – to include in the design of social orthotic systems.}, author = {Lisetti, Christine L}, booktitle = {Proceedings of the CHI 2008 Conference Workshop on Technology in Mental Health}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lisetti - 2008 - Embodied Conversational Agents for Psychotherapy.pdf:pdf}, keywords = {affective computing,agents,avatars,embodied conversational,psychotherapy,social orthotics}, pages = {1--12}, publisher = {ACM}, title = {{Embodied Conversational Agents for Psychotherapy}}, year = {2008} } @article{Feshbach1968, author = {Feshbach, Norma Deitch and Roe, K}, journal = {Child Development}, number = {1}, pages = {133--145}, pmid = {5645790}, title = {{Empathy in six- and seven-year-olds.}}, volume = {39}, year = {1968} } @article{Norfolk2007, abstract = {CONTEXT: Considerable research has been conducted recently into the notion of patient-centred consulting. The primary goal of this approach is to establish a clear understanding of the patient's perspective on his or her problem, and to allow this understanding to inform both the explanation and planning stages of the consultation. The quality of this understanding is largely determined by the empathic accuracy achieved by the doctor; the primary benefit is a therapeutic rapport between doctor and patient. 
METHODS: To highlight the role of empathy and communication skills in establishing rapport, we initially developed a model which seeks to draw the various motivational and skill elements identified in separate research papers into a comprehensive model of the journey towards shared understanding between doctor and patient. We then conducted an initial validation of the model via qualitative analysis involving general practitioners (GPs) and clinical psychologists. RESULTS: The validation offered encouraging support for the principal elements of the model. Specific suggestions for clarification and extension were then incorporated in a revised model. CONCLUSIONS: The model appears to capture the dynamic process of establishing a therapeutic relationship (rapport) between doctor and patient, defined by the quality of the doctor's understanding of the patient's perspective on his or her problem. Arguably, the most important contribution of the model is to highlight the fact that 'empathy' and consequent 'rapport' are not mystical or exclusive concepts but, rather, involve the use of specific skills accessible at some level by all.}, author = {Norfolk, Tim and Birdi, Kamal and Walsh, Deirdre}, doi = {10.1111/j.1365-2923.2007.02789.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Norfolk, Birdi, Walsh - 2007 - The role of empathy in establishing rapport in the consultation a new model.pdf:pdf}, issn = {0308-0110}, journal = {Medical education}, keywords = {Attitude of Health Personnel,Communication,Empathy,Family Practice,Family Practice: standards,Humans,Models, Theoretical,Motivation,Patient-Centered Care,Patient-Centered Care: standards,Physician-Patient Relations,Terminology as Topic}, month = jul, number = {7}, pages = {690--7}, pmid = {17614890}, title = {{The role of empathy in establishing rapport in the consultation: a new model.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17614890}, volume = {41}, year = 
{2007} } @article{Buck1974, author = {Buck, R. and Miller, R. E. and Caul, W. F.}, journal = {Journal of Personality and Social Psychology}, pages = {587--596}, title = {{Sex, personality, and physiological variables in the communication of affect via facial expression}}, volume = {30}, year = {1974} } @article{Pardas2002, abstract = {The video analysis system described in this paper aims at facial expression recognition consistent with the MPEG4 standardized parameters for facial animation, FAP. For this reason, two levels of analysis are necessary: low level analysis to extract the MPEG4 compliant parameters and high level analysis to estimate the expression of the sequence using these low level parameters. The low level analysis is based on an improved active contour algorithm that uses high level information based on Principal Component Analysis to locate the most significant contours of the face (eyebrows and mouth), and on motion estimation to track them. The high level analysis takes as input the FAP produced by the low level analysis tool and, by means of a Hidden Markov Model classifier, detects the expression of the sequence.}, author = {Pard\`{a}s, Montse and Bonafonte, Antonio}, doi = {10.1016/S0923-5965(02)00078-4}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pard\`{a}s, Bonafonte - 2002 - Facial animation parameters extraction and expression recognition using Hidden Markov Models.pdf:pdf}, issn = {09235965}, journal = {Signal Processing: Image Communication}, month = oct, number = {9}, pages = {675--688}, title = {{Facial animation parameters extraction and expression recognition using Hidden Markov Models}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0923596502000784}, volume = {17}, year = {2002} } @inproceedings{Hegel2006, author = {Hegel, Frank and Spexard, Torsten and Wrede, Britta and Horstmann, G. 
and Vogt, T.}, booktitle = {Humanoid Robots, 2006 6th IEEE-RAS International Conference on}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hegel et al. - 2006 - Playing a different imitation game Interaction with an Empathic Android Robot.pdf:pdf}, isbn = {142440200X}, pages = {56--61}, publisher = {IEEE}, title = {{Playing a different imitation game: Interaction with an Empathic Android Robot}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4115580}, year = {2006} } @article{Boucouvalas2003, abstract = {In this work we focus on demonstrating a real time communication interface which enhances text communication by detecting from real time typed text, the extracted emotions, and displaying on the screen appropriate facial expression images in real time. The displayed expressions are represented in terms of expressive images or sketches of the communicating persons. This interface makes use of a developed real time emotion extraction engine from text. The emotion extraction engine and extraction rules are discussed together with a description of the interface, its limits and future direction of such interface. The extracted emotions are mapped into displayed facial expressions. Such interface can be used as a platform for a number of future CMC experiments. The developed online communication interface brings together remotely located collaborating parties in a shared electronic spacefor their communication. In its current state the interface allows the participant to see at a glance all other online participants and all those who are engaged in communications. An important aspect of the interface is that for two users engaged in communication, the interface locally extracts emotional states from the content of typed textual sentences automatically. Subsequently it displays discrete expressions mapped from extracted emotions to the remote screen of the other person. 
It also analyses/extracts the intensity/duration of the emotional state. At the same time the users can also control their expression, if they wish, manually. The interface also uses text to speech synthesis, which allows the user to glance on other tasks while at the same time listening to the communication. A shared whiteboard also allows the users to engage in collaborative work. Finally it is also possible to view your own expression (feedback) which is displayed and viewed by the other user, an add on feature not possible with face to face communication between two people.}, author = {Boucouvalas, Anthony C}, chapter = {21}, editor = {Riva, G and Davide, F and Jsselsteijn, W A I}, journal = {Emotion}, pages = {305--318}, publisher = {Ios Press}, title = {{Real Time Text-to-Emotion Engine for Expressive Internet Communications}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:Real+Time+Text-to-Emotion+Engine+for+Expressive+Internet+Communications\#1}, volume = {5}, year = {2003} } @inproceedings{Oliver2006, author = {Oliver, Nuria and Flores-mangas, Fernando}, booktitle = {Proceedings of the International Workshop on Wearable and Implantable Body Sensor Networks (BSN’06)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Oliver, Flores-mangas - 2006 - HealthGear A Real-time Wearable System for Monitoring and Analyzing Physiological Signals Automatic Dete.pdf:pdf}, isbn = {0769525474}, pages = {1--4}, publisher = {IEEE Computer Society}, title = {{HealthGear : A Real-time Wearable System for Monitoring and Analyzing Physiological Signals Automatic Detection of Sleep Apnea}}, volume = {c}, year = {2006} } @phdthesis{Lisetti2011, author = {Lisetti, Christine L}, school = {Florida International University}, title = {{What Kind of Emotions Are There? 
Structure of Emotion}}, type = {Lecture}, year = {2011} } @article{Rebolledo-Mendez2009, author = {Rebolledo-Mendez, Genaro and de Freitas, Sara and Gaona, Alma Rosa Garcia}, doi = {10.1109/VS-GAMES.2009.33}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rebolledo-Mendez, Freitas, Gaona - 2009 - A Model of Motivation Based on Empathy for AI-Driven Avatars in Virtual Worlds.pdf:pdf}, isbn = {978-0-7695-3588-3}, journal = {2009 Conference in Games and Virtual Worlds for Serious Applications}, keywords = {empathy,motivation,serious games}, month = mar, pages = {5--11}, publisher = {IEEE}, title = {{A Model of Motivation Based on Empathy for AI-Driven Avatars in Virtual Worlds}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5116547}, year = {2009} } @article{Mehrabian1967, author = {Mehrabian, Albert and Ferris, Susan R.}, journal = {Journal of Consulting Psychology}, keywords = {attitude,communication,facial expression,female,humans,verbal behavior}, number = {3}, pages = {248--252}, pmid = {6046577}, title = {{Inference of attitudes from nonverbal communication in two channels.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6046577}, volume = {31}, year = {1967} } @techreport{Picard1995, address = {Cambridge, MA}, author = {Picard, Rosalind W. 
and Klein, Jonathan}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Picard, Klein - 1995 - Computers that Recognise and Respond to User Emotion Theoretical and Practical Implications.pdf:pdf}, institution = {MIT Media Lab}, keywords = {1 introduction,affective computing,emotion and users,emotional needs,empathetic,frustration,human-centred design,interface,social interface,user emotion}, number = {Spitz 1945}, pages = {1--26}, title = {{Computers that Recognise and Respond to User Emotion : Theoretical and Practical Implications}}, year = {1995} } @article{Bestgen1994, abstract = {In spite of the growing interest witnessed in the study of the relationship between emotion and language, the determination of the emotional valence of sentences, paragraphs or texts has so far attracted little attention. To bridge this gap, a technique based on the emotional aspect of words is presented. In this preliminary study, we have compared the affective tones of the sentences of four texts as perceived by readers, to the values generated by the words that compose the texts. The results support the psychological reality of the affective tones of linguistic units larger than a word, and the possibility of their evaluation through the lexical information. 
Such information should be useful for studying the role of emotional interest on text processing and for the analysis of the natural stories produced by people in reaction to stressful events.}, author = {Bestgen, Yves}, doi = {10.1080/02699939408408926}, issn = {02699931}, journal = {Cognition \& Emotion}, number = {1}, pages = {21--36}, title = {{Can emotional valence in stories be determined from words?}}, url = {http://www.informaworld.com/openurl?genre=article\&doi=10.1080/02699939408408926\&magic=crossref}, volume = {8}, year = {1994} } @article{Moon2000, author = {Moon, Youngme}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Moon - 2000 - Intimate Exchanges Using Computers to Elicit Self-Disclosure from Consumers.pdf:pdf}, journal = {JOurnal of Consumer Research}, number = {4}, pages = {323--339}, title = {{Intimate Exchanges : Using Computers to Elicit Self-Disclosure from Consumers}}, volume = {26}, year = {2000} } @inproceedings{Sourina2011a, abstract = {To make human computer interfaces more immersive and intuitive, a new dimension could be added. Real-time brain state recognition from EEG in- cluding emotion recognition and level of concentration recognition would make an access to information more adaptive and personalized. Modern EEG tech- niques give us an easy and portable way to monitor brain activities by using suitable signal processing and classification methods and algorithms. We pro- posed a new subject-dependent fractal-based approach to brain state recognition and innovative applications based on EEG-enable user’s interaction. 
The algo- rithms of the “inner” brain state quantification including emotion recognition would advance research on human computer interaction bringing the proposed novel objective quantification methods and algorithms as new tools in medical, entertainment, and even digital art methodology applications, and allowing us an integration of the brain state quantification algorithms in the human com- puter interfaces. In this paper, we describe our fractal-based approach to the brain state recognition and its EEG-enable applications such as serious games, emotional avatar, music therapy, music player, and storytelling.}, author = {Sourina, Olga and Liu, Yisi and Wang, Qiang and Nguyen, Minh Khoa}, booktitle = {Proceedings of the 6th international conference on Universal access in human-computer interaction: users diversity - Volume Part II (UAHCI'11)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sourina et al. - 2011 - EEG-Based Personalized Digital Experience.pdf:pdf}, keywords = {BCI,HCI,emotion recognition,fractal dimension,music therapy,serious game,storytelling.}, pages = {591--599}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{EEG-Based Personalized Digital Experience}}, year = {2011} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Zahn-waxler1992, author = {Zahn-Waxler, Carolyn and Robinson, JoAnn L. and Emde, Robert N.}, doi = {10.1037//0012-1649.28.6.1038}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zahn-Waxler, Robinson, Emde - 1992 - The development of empathy in twins.pdf:pdf}, issn = {0012-1649}, journal = {Developmental Psychology}, number = {6}, pages = {1038--1047}, title = {{The development of empathy in twins.}}, volume = {28}, year = {1992} } @incollection{Batson1987, address = {Cambridge}, author = {Batson, C. D. and Fultz, J. and Schoenrade, P. A.}, booktitle = {Empathy and its development}, editor = {Eisenberg, N. and Strayer, J.}, pages = {163--185}, publisher = {Cambridge University Press}, title = {{Adults' emotional reactions to the distress of others}}, year = {1987} } @inproceedings{Attfield2011, abstract = {User engagement is a key concept in designing user-centred web applications. It refers to the quality of the user experi- ence that emphasises the positive aspects of the interaction, and in particular the phenomena associated with being cap- tivated by technology. This definition is motivated by the observation that successful technologies are not just used, but they are engaged with. 
Numerous methods have been proposed in the literature to measure engagement, however, little has been done to validate and relate these measures and so provide a firm basis for assessing the quality of the user experience. Engagement is heavily influenced, for ex- ample, by the user interface and its associated process flow, the user’s context, value system and incentives. In this paper we propose an approach to relating and de- veloping unified measures of user engagement. Our ulti- mate aim is to define a framework in which user engagement can be studied, measured, and explained, leading to recom- mendations and guidelines for user interface and interaction design for front-end web technology. Towards this aim, in this paper, we consider how existing user engagement met- rics, web analytics, information retrieval metrics, and mea- sures from immersion in gaming can bring new perspective to defining, measuring and explaining user engagement.}, address = {Hong Kong, China}, author = {Attfield, Simon and Kazai, Gabriella}, booktitle = {WSDM Workshop on User Modelling for Web Applications}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Attfield, Kazai - 2011 - Towards a science of user engagement (Position Paper).pdf:pdf}, isbn = {9781450304931}, keywords = {Evaluation,User engagement,User experience,evaluation,user engagement,user experience}, mendeley-tags = {Evaluation,User engagement,User experience}, title = {{Towards a science of user engagement (Position Paper)}}, url = {http://www.dcs.gla.ac.uk/~mounia/Papers/engagement.pdf}, year = {2011} } @book{Davis1994, author = {Davis, Mark H.}, isbn = {0697168948}, publisher = {Westview Press}, title = {{Empathy: A social psychological approach}}, year = {1994} } @article{Mehrabian1969, author = {Mehrabian, Albert}, journal = {Psychological Bulletin}, pages = {359--372}, title = {{Significance of posture and position in the communication of attitude and status 
relationships}}, volume = {71}, year = {1969} } @article{Tracy2009a, author = {Tracy, J. L. and Robins, R. W. and Schriber, R. A.}, doi = {10.1037/a0015766}, journal = {Emotion}, number = {4}, pages = {554--559}, title = {{Development of a FACS-verified set of basic and self-conscious emotion expressions}}, volume = {9}, year = {2009} } @book{Halliday1977, author = {Halliday, Michael Alexander Kirkwood}, publisher = {Elsevier North-Holland}, title = {{Explorations in the functions of language}}, year = {1977} } @misc{Sabourin, author = {Sabourin, Jennifer and Mott, Bradford and Lester, James}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sabourin, Mott, Lester - Unknown - Computational Models of Affect and Empathy for Pedagogical Virtual Agents.pdf:pdf}, keywords = {empathetic virtual agents,pedagogical agents,virtual learning}, title = {{Computational Models of Affect and Empathy for Pedagogical Virtual Agents}}, url = {http://www.lorentzcenter.nl/lc/web/2011/464/presentations/Sabourin.pdf}, year = {2011} } @article{Cogger1982, author = {Cogger, J W}, journal = {The Personnel Journal}, number = {11}, pages = {840--843}, pmid = {10258019}, title = {{Are you a skilled interviewer?}}, volume = {61}, year = {1982} } @incollection{Ekman1982, address = {New York}, author = {Ekman, Paul and Friesen, W V and Ellsworth, P}, booktitle = {Emotion in the human face}, editor = {Ekman, Paul}, pages = {39--55}, publisher = {Cambridge University Press}, title = {{What emotion categories or dimensions can observers judge from facial behavior?}}, year = {1982} } @article{Turunen2011, abstract = {Multimodal conversational spoken dialogues using physical and virtual agents provide a potential interface to motivate and support users in the domain of health and fitness. This paper describes how such multimodal conversational Companions can be implemented to support their owners in various pervasive and mobile settings. 
We present concrete system architectures, virtual, physical and mobile multimodal interfaces, and interaction management techniques for such Companions. In particular how knowledge representation and separation of low-level interaction modelling from high-level reasoning at the domain level makes it possible to implement distributed, but still coherent, interaction with Companions. The distribution is enabled by using a dialogue plan to communicate information from domain level planner to dialogue management and from there to a separate mobile interface. The model enables each part of the system to handle the same information from its own perspective without containing overlapping logic, and makes it possible to separate task-specific and conversational dialogue management from each other. In addition to technical descriptions, results from the first evaluations of the Companions interfaces are presented.}, author = {Turunen, Markku and Hakulinen, Jaakko and St\aa hl, Olov and Gamb\"{a}ck, Bj\"{o}rn and Hansen, Preben and {Rodr\'{\i}guez Gancedo}, Mari C. and de la C\'{a}mara, Ra\'{u}l Santos and Smith, Cameron and Charlton, Daniel and Cavazza, Marc}, doi = {10.1016/j.csl.2010.04.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Turunen et al. 
- 2011 - Multimodal and mobile conversational Health and Fitness Companions.pdf:pdf}, issn = {08852308}, journal = {Computer Speech \& Language}, keywords = {cognitive modelling,companions,conversational spoken dialogue systems,embodied conversational agents,mobile interfaces}, month = apr, number = {2}, pages = {192--209}, publisher = {Elsevier Ltd}, title = {{Multimodal and mobile conversational Health and Fitness Companions}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0885230810000355}, volume = {25}, year = {2011} } @phdthesis{Yi-ChenHsu2011, author = {{Yi-Chen Hsu}}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yi-Chen Hsu - 2011 - Affective Interfaces of Embodied Conversational Agents Studies of hardware and Character Interfaces.pdf:pdf}, number = {October}, school = {The University of New South Wales}, title = {{Affective Interfaces of Embodied Conversational Agents: Studies of hardware and Character Interfaces}}, type = {PhD Thesis}, year = {2011} } @book{Thayer1996, author = {Thayer, Robert E.}, isbn = {9780195118056}, pages = {288}, publisher = {Oxford University Press}, title = {{The Origin of Everyday Moods: Managing Energy, Tension, and Stress}}, year = {1996} } @article{Blair2005, abstract = {Empathy is a lay term that is becoming increasingly viewed as a unitary function within the field of cognitive neuroscience. In this paper, a selective review of the empathy literature is provided. It is argued from this literature that empathy is not a unitary system but rather a loose collection of partially dissociable neurocognitive systems. In particular, three main divisions can be made: cognitive empathy (or Theory of Mind), motor empathy, and emotional empathy. The two main psychiatric disorders associated with empathic dysfunction are considered: autism and psychopathy. 
It is argued that individuals with autism show difficulties with cognitive and motor empathy but less clear difficulties with respect to emotional empathy. In contrast, individuals with psychopathy show clear difficulties with a specific form of emotional empathy but no indications of impairment with cognitive and motor empathy.}, author = {Blair, R J R}, doi = {10.1016/j.concog.2005.06.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Blair - 2005 - Responding to the emotions of others dissociating forms of empathy through the study of typical and psychiatric populatio.pdf:pdf}, issn = {1053-8100}, journal = {Consciousness and cognition}, keywords = {Affect,Affect: physiology,Autistic Disorder,Autistic Disorder: physiopathology,Brain,Brain: physiopathology,Cognition,Cognition: physiology,Empathy,Humans,Mental Disorders,Mental Disorders: physiopathology,Social Perception}, month = dec, number = {4}, pages = {698--718}, pmid = {16157488}, title = {{Responding to the emotions of others: dissociating forms of empathy through the study of typical and psychiatric populations}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16157488}, volume = {14}, year = {2005} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @inproceedings{Baccianella2010, abstract = {In this work we present SENTIWORDNET 3.0, a lexical resource explicitly devised for supporting sentiment classification and opinion mining applications. SENTIWORDNET 3.0 is an improved version of SENTIWORDNET 1.0, a lexical resource publicly available for research purposes, now currently licensed to more than 300 research groups and used in a variety of research projects worldwide. Both SENTIWORDNET 1.0 and 3.0 are the result of automatically annotating all WORDNET synsets according to their degrees of positivity, negativity, and neutrality. SENTIWORDNET 1.0 and 3.0 differ (a) in the versions of WORDNET which they annotate (WORDNET 2.0 and 3.0, respectively), (b) in the algorithm used for automatically annotating WORDNET, which now includes (additionally to the previous semi-supervised learning step) a random-walk step for refining the scores. We here discuss SENTIWORDNET 3.0, especially focussing on the improvements concerning aspect (b) that it embodies with respect to version 1.0. 
We also report the results of evaluating SENTIWORDNET 3.0 against a fragment of WORDNET 3.0 manually annotated for positivity, negativity, and neutrality; these results indicate accuracy improvements of about 20\% with respect to SENTIWORDNET 1.0.}, address = {Valletta, Malta}, author = {Baccianella, Stefano and Esuli, Andrea and Sebastiani, Fabrizio}, booktitle = {Proceedings of the Seventh International Conference on Language Resources and Evaluation (LREC'10)}, editor = {Calzolari, Nicoletta and Choukri, Khalid and Maegaard, Bente and Mariani, Joseph and Odijk, Jan and Piperidis, Stelios and Rosner, Mike and Tapias, Daniel}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Baccianella, Esuli, Sebastiani - 2010 - SENTIWORDNET 3.0 An Enhanced Lexical Resource for Sentiment Analysis and Opinion Mining.pdf:pdf}, pages = {2200--2204}, publisher = {European Language Resources Association (ELRA)}, title = {{SENTIWORDNET 3.0: An Enhanced Lexical Resource for Sentiment Analysis and Opinion Mining}}, volume = {0}, year = {2010} } @article{Gianakos1996, author = {Gianakos, D.}, journal = {Archives of Internal Medicine}, pages = {135--136}, title = {{Empathy revisited}}, volume = {156}, year = {1996} } @inproceedings{Cavazza2010, abstract = {This paper presents a dialogue system in the form of an ECA that acts as a sociable and emotionally intelligent companion for the user. The system dialogue is not task-driven but is social conversation in which the user talks about his/her day at the office. During conversations the system monitors the emotional state of the user and uses that information to inform its dialogue turns. The system is able to respond to spoken interruptions by the user, for example, the user can interrupt to correct the system. 
The system is already fully implemented and aspects of actual output will be used to illustrate.}, address = {The University of Tokyo}, author = {Cavazza, Marc and Vargas, C Emilio and Gil, Jos\'{e} Rela\~{n}o and Telef\'{o}nica, I D and Crook, Nigel and Field, Debora and Sheffield, S}, booktitle = {Proceedings of SIGDIAL 2010: the 11th Annual Meeting of the Special Interest Group on Discourse and Dialogue}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cavazza et al. - 2010 - ‘ How was your day ’ An affective companion ECA prototype.pdf:pdf}, pages = {277--280}, publisher = {Association for Computational Linguistics}, title = {{`How was your day?' An affective companion ECA prototype}}, volume = {1}, year = {2010} } @article{Mori1970, abstract = {CiteULike is a free online bibliography manager. Register and you can start organising your references online. Tags. Bukimi no tani . by: M. Mori. No URLs defined. Abstract. First description of theory. mrosenki's tags for this article.}, author = {Mori, Masahiro}, doi = {10.1162/pres.16.4.337}, issn = {02721716}, journal = {Energy}, number = {4}, pages = {33--35}, publisher = {Springer Wien NewYork}, title = {{The Uncanny Valley}}, url = {http://www.movingimages.info/digitalmedia/wp-content/uploads/2010/06/MorUnc.pdf}, volume = {7}, year = {1970} } @article{Rogers1957, author = {Rogers, C. R.}, editor = {Kirschenbaum, H.}, isbn = {9780395483572}, issn = {00958891}, journal = {Journal of Consulting Psychology}, number = {2}, pages = {95--103}, pmid = {13416422}, publisher = {Houghton Mifflin}, title = {{The necessary and sufficient conditions of therapeutic personality change}}, volume = {21}, year = {1957} } @inproceedings{Knoppel2008, abstract = {DEIRA is a virtual agent commenting on virtual horse races in real time. 
DEIRA analyses the state of the race, acts emotionally and comments about the situation in a believable and engaging way, using synthesized speech and facial expressions. In this paper we discuss the challenges, explain the computational models for the cognitive, emotional and communicative behavior, and account on implementation and feedback from users.}, address = {Estoril, Portugal}, author = {Knoppel, Fran\c{c}ois L A and Tigelaar, Almer S and Bos, Danny Oude and Alofs, Thijs and Ruttkay, Zs\'{o}fia}, booktitle = {7th international joint conference on Autonomous agents and multiagent systems}, editor = {Padgham and Parkes and M\"{u}ller and Parsons}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Knoppel et al. - 2008 - Trackside DEIRA A Dynamic Engaging Intelligent Reporter Agent.pdf:pdf}, keywords = {emotion,facial expressions,intelligent virtual agent,modeling,multimodal communication,synthetic speech}, pages = {112--119}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{Trackside DEIRA : A Dynamic Engaging Intelligent Reporter Agent}}, url = {http://dl.acm.org/citation.cfm?id=1402404}, year = {2008} } @inproceedings{Helzle2004, address = {New York, NY}, author = {Helzle, V. and Biehn, C. and Schl\"{o}mer, T. and Linner, F.}, booktitle = {Proceedings of ACM SIGGRAPH ’04}, editor = {Barzel, R.}, pages = {54}, publisher = {Association for Computing Machinery}, title = {{Adaptable setup for performance driven facial animation}}, year = {2004} } @article{Cai2006, abstract = {Empathic computing is an emergent paradigm that enables a system to understand human states and feelings and to share this intimate information. The new paradigm is made possible by the convergence of affordable sensors, embedded processors and wireless ad-hoc networks. 
The power law for multi-resolution channels and mobile-stationary sensor webs is introduced to resolve the information avalanche problems. As empathic computing is sensor-rich computing, particular models such as semantic differential expressions and inverse physics are discussed. A case study of a wearable sensor network for detection of a falling event is presented. It is found that the location of the wearable sensor is sensitive to the results. From the machine learning algorithm, the accuracy reaches up to 90\% from 21 simulated trials. Empathic computing is not limited to healthcare. It can also be applied to solve other everyday-life problems such as management of emails and stress.}, author = {Cai, Yang}, doi = {10.1007/11825890\_3}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cai - 2006 - Empathic computing.pdf:pdf}, journal = {Ambient Intelligence in Everyday Life, Lecture Notes in Computer Science}, pages = {67--85}, publisher = {Springer}, title = {{Empathic computing}}, url = {http://www.springerlink.com/index/l482m128476w5043.pdf}, volume = {3864/2006}, year = {2006} } @incollection{Pereira2008, abstract = {Emotional-BDI agents are BDI agents whose behaviour is guided not only by beliefs, desires and intentions, but also by the role of emotions in reasoning and decision-making. The EBDI logic is a formal sys- tem for expressing the concepts of the Emotional-BDI model of agency. In this paper we present an improved version of the EBDI logic and show how it can be used to model the role of three emotions in Emotional-BDI agents: fear, anxiety and self-confidence. 
We also focus on the computational properties of EBDI which can lead to its use in automated proof systems.}, author = {Pereira, David}, booktitle = {Computational Logic in Multi-Agent Systems}, doi = {10.1007/978-3-540-88833-8\_4}, editor = {Sadri, Fariba and Satoh, Ken}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pereira - 2008 - Formal Modelling of Emotions in BDI Agents.pdf:pdf}, isbn = {978-3-540-88832-1}, pages = {62--81}, publisher = {Springer-Verlag}, title = {{Formal Modelling of Emotions in BDI Agents}}, year = {2008} } @article{Young2013, author = {Young, S. and Ga\v{s}i\'{c}, M. and Thomson, Blaise and Williams, J. D.}, doi = {10.1109/JPROC.2012.2225812}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Young et al. - 2013 - POMDP-Based Statistical Spoken Dialog Systems A Review.pdf:pdf}, journal = {Proceedings of the IEEE}, keywords = {1,at each turn the,components of a finite-state-based,fig,input speech is converted,representation of the user,s intent u t,spoken dialog system,t is updated,the dialog state s,to an abstract}, number = {5}, pages = {1160--1179}, title = {{POMDP-Based Statistical Spoken Dialog Systems: A Review}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6407655}, volume = {101}, year = {2013} } @article{Tractinsky2000, author = {Tractinsky, N. and Katz, A. S. and Ikar, D.}, doi = {10.1016/s0953-5438(00)00031-x}, journal = {Interacting with Computers}, number = {2}, pages = {127--145}, title = {{What is Beautiful is Usable}}, volume = {13}, year = {2000} } @article{Walters2007, abstract = {OBJECTIVE: Alcohol consumption has been a growing concern at U.S. colleges, particularly among first-year students, who are at increased risk for problems. This study tested the efficacy of the "electronic Check-Up to Go" (e-CHUG), a commercially-available internet program, at reducing drinking among a group of at-risk college freshmen. 
METHOD: The design was a randomized controlled trial: 106 freshmen students who reported heavy episodic drinking were randomly assigned to receive feedback or to assessment only. Assessment measures were completed at baseline, 8 weeks, and 16 weeks. RESULTS: At 8 weeks, the feedback group showed a significant decrease in drinks per week and peak BAC over control. By 16 weeks, the control group also declined to a point where there were no differences between groups. Changes in normative drinking estimates mediated the effect of the intervention. An additional 245 abstainers and light drinkers who were also randomized to condition did not show any intervention effect. CONCLUSIONS: This study provides preliminary support for the efficacy of this intervention at reducing short-term drinking among at-risk students.}, author = {Walters, Scott T and Vader, Amanda M and Harris, T Robert}, institution = {University of Texas School of Public Health, Dallas Regional Campus, Dallas, TX 75390-9128, USA. scott.walters@utsouthwestern.edu}, journal = {Prevention science the official journal of the Society for Prevention Research}, keywords = {adult,alcoholism,alcoholism prevention \& control,feedback,female,humans,internet,male,psychological,questionnaires,texas,universities}, number = {1}, pages = {83--88}, pmid = {17136461}, publisher = {University of Texas School of Public Health, Dallas Regional Campus, Dallas, TX 75390-9128, USA. 
scott.walters@utsouthwestern.edu}, title = {{A controlled trial of web-based feedback for heavy drinking college students.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17136461}, volume = {8}, year = {2007} } @book{Apa1994, abstract = {DSM-IV}, author = {APA}, booktitle = {W}, institution = {American Psychiatric Association}, isbn = {0890420629}, number = {VI}, pages = {xxvii, 886 p.}, pmid = {1595545}, publisher = {American Psychiatric Association}, title = {{Diagnostic and statistical manual of mental disorders: DSM-IV}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:DSM-IV:+diagnostic+and+statistical+manual+of+mental+disorders\#0}, volume = {4th}, year = {1994} } @inproceedings{Gilroy2011, address = {New York, New York, USA}, author = {Gilroy, Stephen W. and Cavazza, Marc O. and Vervondel, Valentin}, booktitle = {Proceedings of the 16th international conference on Intelligent user interfaces - IUI '11}, doi = {10.1145/1943403.1943413}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gilroy, Cavazza, Vervondel - 2011 - Evaluating multimodal affective fusion using physiological signals.pdf:pdf}, isbn = {9781450304191}, pages = {53--62}, publisher = {ACM Press}, title = {{Evaluating multimodal affective fusion using physiological signals}}, url = {http://portal.acm.org/citation.cfm?doid=1943403.1943413}, year = {2011} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Aviezer2008, abstract = {Current theories of emotion perception posit that basic facial expressions signal categorically discrete emotions or affective dimensions of valence and arousal. In both cases, the information is thought to be directly "read out" from the face in a way that is largely immune to context. In contrast, the three studies reported here demonstrated that identical facial configurations convey strikingly different emotions and dimensional values depending on the affective context in which they are embedded. This effect is modulated by the similarity between the target facial expression and the facial expression typically associated with the context. Moreover, by monitoring eye movements, we demonstrated that characteristic fixation patterns previously thought to be determined solely by the facial expression are systematically modulated by emotional context already at very early stages of visual processing, even by the first time the face is fixated. 
Our results indicate that the perception of basic facial expressions is not context invariant and can be categorically altered by context at early perceptual levels.}, author = {Aviezer, Hillel and Hassin, Ran R and Ryan, Jennifer and Grady, Cheryl and Susskind, Josh and Anderson, Adam and Moscovitch, Morris and Bentin, Shlomo}, institution = {Department of Psychology, Hebrew University of Jerusalem, Jerusalem 91905, Israel. hillel.aviezer@mail.huji.ac.il}, journal = {Psychological Science}, keywords = {adolescent,affect,anger,facial expression,fear,female,humans,male,social perception,visual perception,young adult}, number = {7}, pages = {724--732}, pmid = {18727789}, publisher = {Blackwell Publishing}, title = {{Angry, disgusted, or afraid? Studies on the malleability of emotion perception.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18727789}, volume = {19}, year = {2008} } @inproceedings{Kang2008, author = {Kang, Sin-hwa and Gratch, Jonathan and Wang, Ning and Watt, J.H.}, booktitle = {Proceedings of the 7th international joint conference on Autonomous agents and multiagent systems-Volume 1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kang et al. - 2008 - Does the contingency of agents' nonverbal feedback affect users' social anxiety.pdf:pdf}, keywords = {agents,contingency of nonverbal feedback,evaluation,rapport,social anxiety,virtual humans}, number = {Aamas}, pages = {120--127}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems}, title = {{Does the contingency of agents' nonverbal feedback affect users' social anxiety?}}, url = {http://dl.acm.org/citation.cfm?id=1402405}, year = {2008} } @inproceedings{Pontier2010, abstract = {In earlier studies, user involvement with an embodied software agent and willingness to use that agent were partially determined by the aesthetics of the design and the moral fiber of the character. 
We used these empirical results to model agents that in their turn would build up affect for their users much the same way as humans do for agents. Through simulations, we tested these models for internal consistency and were successful in establishing the relationships among the factors as suggested by the earlier user studies. This paper reports on the first confrontation of our agent system with real users to check whether users recognize that our agents function in similar ways as humans do. Through a structured questionnaire, users informed us whether our agents evaluated the user's aesthetics and moral stance while building up a level of involvement with the user and a degree of willingness to interact with the user again.}, author = {Pontier, Matthijs and Siddiqui, Ghazanfar and Hoorn, Johan F}, booktitle = {Proceedings of the 10th international conference on Intelligent virtual agents (IVA)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pontier, Siddiqui, Hoorn - 2010 - Speed Dating with an Affective Virtual Agent - Developing a Testbed for Emotion Models.pdf:pdf}, keywords = {cognitive modeling,emotion modeling,empirical testing,humans,speed dating,virtual}, pages = {91--103}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{Speed Dating with an Affective Virtual Agent - Developing a Testbed for Emotion Models}}, url = {http://dl.acm.org/citation.cfm?id=1889087}, year = {2010} } @article{Panksepp1982, abstract = {Emotions seem to arise ultimately from hard-wired neural circuits in the visceral-limbic brain that facilitate diverse and adaptive behavioral and physiological response to major classes of environmental challenges. Presumably these circuits developed early in mammalian brain evolution, and the underlying control mechanisms remain similar in humans and "lower" mammals. 
This would suggest that theoretically guided studies of the animal brain can reveal how primitive emotions are organized in the human brain. Conversely, granted this cross-species heritage, it is arguable that human introspective access to emotional states may provide direct information concerning operations of emotive circuits and thus be a primary source of hypotheses for animal brain research. In this article the possibility that emotions are elaborated by transhypothalamic executive (command) circuits that concurrently activate related behavior patterns is assessed. Current neurobehavioral evidence indicates that there are at least four executive circuits of this type - those which elaborate central states of expectancy, rage, fear, and panic. The manner in which learning and psychiatric disorders may arise from activities of such circuits is also discussed.}, author = {Panksepp, J.}, issn = {14691825}, journal = {Behavioral and Brain Sciences}, number = {3}, pages = {407--467}, title = {{Toward a general psychobiological theory of emotions}}, url = {http://scholar.google.com.au/scholar?as\_q=Panksepp+J+\&num=10\&btnG=Search+Scholar\&as\_epq=Toward+a+general+psychobiological+theory+of\&as\_oq=\&as\_eq=\&as\_occt=any\&as\_sauthors=\&as\_publication=\&as\_ylo=1982\&as\_yhi=1982\&as\_sdt=1.\&as\_sdtp=on\&as\_sdts=5\&hl=en\#0}, volume = {5}, year = {1982} } @inproceedings{Lee2006, address = {Marina del Rey}, author = {Lee, Jina and Marsella, Stacy C.}, booktitle = {Proceedings of the 6th international conference on Intelligent Virtual Agents (IVA'06)}, editor = {Gratch, Jonathan}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Marsella, Rey - 2006 - Nonverbal Behavior Generator for Embodied Conversational Agents.pdf:pdf}, pages = {243--255}, publisher = {Springer Berlin / Heidelberg}, title = {{Nonverbal Behavior Generator for Embodied Conversational Agents}}, year = {2006} } 
@inproceedings{Johnson2007, abstract = {Any new tool validated. introduced for education needs to be We developed a virtual human experience called the Virtual Objective Structured Clinical Examination (VOSCE). In the VOSCE, a medical student examines a life-size virtual human who is presenting symptoms of an illness. The student is then graded on interview skills. As part of a medical school class requirement, thirty three second year medical students participated in a user study designed to determine the validity of the VOSCE for testing interview skills. In the study, participant performance in the VOSCE is compared to participant performance in the OSCE, an interview with a trained actor. There was a significant correlation (r(33)=.49, p<.005) between overall score in the VOSCE and overall score in the OSCE. This means that the interaction skills used with a virtual human translate to the interaction skills used with a real human. Comparing the experience of virtual human interaction to real human interaction is the critical validation step towards using virtual humans for interpersonal skills education.}, address = {San Jose, California, USA}, author = {Johnsen, Kyle and Raij, Andrew and Stevens, Amy and Lind, D Scott and Lok, Benjamin}, booktitle = {CHI 2007 Proceedings of Learning \& Education}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Johnsen et al. - 2007 - The Validity of a Virtual Human Experience for Interpersonal Skills Education.pdf:pdf}, isbn = {9781595935939}, keywords = {medicine,multimodal interfaces.,validation,virtual characters,virtual humans,virtual reality}, pages = {1049--1058}, publisher = {ACM}, title = {{The Validity of a Virtual Human Experience for Interpersonal Skills Education}}, year = {2007} } @article{Miller2010, abstract = {The widely-disseminated clinical method of motivational interviewing (MI) arose through a convergence of science and practice. 
Beyond a large base of clinical trials, advances have been made toward “looking under the hood” of MI to understand the underlying mechanisms by which it affects behavior change. Such specification of outcome-relevant aspects of practice is vital to theory development, and can inform both treatment delivery and clinical training. An emergent theory of MI is proposed, emphasizing two specific active components: a relational component focused on empathy and the interpersonal spirit of MI, and a technical component involving the differential evocation and reinforcement of client change talk A resulting causal chain model links therapist training, therapist and client responses during treatment sessions, and post-treatment outcomes.}, author = {Miller, William R. and Rose, Gary S.}, doi = {10.1037/a0016830}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Miller, Rose - 2010 - Toward a Theory of Motivational Interviewing.pdf:pdf}, journal = {American Psychologist}, keywords = {Behavior change,Causal chain,Client-centered,Motivational interviewing,Psychotherapy,Theory,Therapeutic process}, number = {6}, pages = {527--537}, title = {{Toward a Theory of Motivational Interviewing}}, volume = {64}, year = {2010} } @incollection{Creed2008, abstract = {Why do computers need emotional intelligence? Science fiction often portrays emotional computers as dangerous and frightening, and as a serious threat to human life. One of the most famous examples is HAL, the supercomputer onboard the spaceship Discovery, in the movie 2001: A Space Odyssey. HAL could express, recognize and respond to human emotion, and generally had strong emotional skills — the consequences of which were catastrophic. However, since the movie’s release almost 40 years ago, the traditional view of emotions as contributing to irrational and unpredictable behavior has changed. 
Recent research has suggested that emotions play an essential role in important areas such as learning, memory, motivation, attention, creativity, and decision making. These findings have prompted a large number of research groups around the world to start examining the role of emotions and emotional intelligence in human-computer interaction (HCI). For almost half a century, computer scientists have been attempting to build machines that can interact intelligently with us, and despite initial optimism, they are still struggling to do so. For much of this time, the role of emotion in developing intelligent computers was largely overlooked, and it is only recently that interest in this area has risen dramatically. This increased interest can largely be attributed to the work of [6] and [85] who were amongst the first to bring emotion to the attention of computer scientists. The former highlighted emotion as a fundamental component required in building believable agents, while the latter further raised the awareness of emotion and its potential importance in HCI. Since these publications, the literature on emotions and computing has grown considerably with progress being made on a number of different fronts.}, author = {Creed, Chris and Beale, Russell}, booktitle = {Computational Intelligence: A Compendium}, doi = {10.1007/978-3-540-78293-3}, editor = {Fulcher, John and Jain, L. 
C.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Creed, Beale - 2008 - Emotional Intelligence Giving Computers Effective Emotional Skills to Aid Interaction.pdf:pdf}, isbn = {978-3-540-78292-6}, pages = {185--230}, publisher = {Springer Berlin / Heidelberg}, title = {{Emotional Intelligence: Giving Computers Effective Emotional Skills to Aid Interaction}}, url = {http://www.springerlink.com/index/U2231064587Q8V07.pdf}, volume = {230}, year = {2008} } @article{Termine1988, author = {Termine, N. T.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Termine - 1988 - Infants' responses to their mothers' expressions of joy and sadness.pdf:pdf}, journal = {Developmental Psychology}, number = {2}, pages = {223--229}, title = {{Infants' responses to their mothers' expressions of joy and sadness}}, volume = {24}, year = {1988} } @book{Prendinger2004a, abstract = {Life-like characters is one of the most exciting technologies for human-computer interface applications today. They convincingly take the roles of virtual presenters, synthetic actors and sales personas, teammates and tutors. A common characteristic underlying their life-likeness or believability as virtual conversational partners is computational models that provide them with affective functions such as synthetic emotions and personalities and implement human interactive behavior. The wide dissemination of life-like characters in multimedia systems, however, will greatly depend on the availability of control languages and tools that facilitate scripting of intelligent conversational behaviour. This book presents the first comprehensive collection of the latest developments in scripting and representation languages for life-like characters, rounded off with an in-depth comparison and synopsis of the major approaches. 
Introducing toolkits for authoring animated characters further supports the ease of use of this new interface technology. Life-like characters being a vibrant research area, various applications have been designed and implemented. This book offers coverage of the most successful and promising applications, ranging from product presentation and student training to knowledge integration and interactive gaming. It also discusses the key challenges in the area and provides design guidelines for employing life-like characters.}, author = {Prendinger, Helmut and Ishizuka, Mitsuru}, isbn = {3540008675, 9783540008675}, publisher = {Springer}, title = {{Life-Like Characters: Tools, Affective Functions, and Applications}}, year = {2004} } @article{Vugt2009, author = {van Vugt, H. C.}, doi = {10.1002/cav.312}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vugt - 2009 - Interactive Engagement With Embodied Agents An Empirically Validated Framework.pdf:pdf}, journal = {Computer Animation and Virtual Worlds}, keywords = {embodied agents,empirical research,end-user,satisfaction,use intentions,user engagement,virtual characters}, number = {2-3}, pages = {195--204}, title = {{Interactive Engagement With Embodied Agents: An Empirically Validated Framework}}, url = {http://onlinelibrary.wiley.com/doi/10.1002/cav.312/abstract}, volume = {20}, year = {2009} } @article{Bartlett2006, author = {Bartlett, Marian Stewart and Littlewort, Gwen C and Frank, Mark G and Lainscsek, Claudia and Fasel, Ian R and Movellan, Javier R}, doi = {10.4304/jmm.1.6.22-35}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bartlett et al. 
- 2006 - Automatic Recognition of Facial Actions in Spontaneous Expressions.pdf:pdf}, institution = {UCSD}, issn = {17962048}, journal = {Journal of Multimedia}, number = {6}, pages = {22--35}, publisher = {Citeseer}, title = {{Automatic Recognition of Facial Actions in Spontaneous Expressions}}, volume = {1}, year = {2006} } @article{Scherer2005, abstract = {Defining emotion is a notorious problem. Without consensual conceptualization and operationalization of exactly what phenomenon is to be studied, progress in theory and research is difficult to achieve and fruitless debates are likely to proliferate. A particularly unfortunate example is William Jamess asking the question What is an emotion? when he really meant feeling, a misnomer that started a debate which is still ongoing, more than a century later. This contribution attempts to sensitize researchers in the social and behavioral sciences to the importance of definitional issues and their consequences for distinguishing related but fundamentally different affective processes, states, and traits. Links between scientific and folk concepts of emotion are explored and ways to measure emotion and its components are discussed.}, author = {Scherer, Klaus R.}, doi = {10.1177/0539018405058216}, issn = {05390184}, journal = {Social Science Information}, keywords = {affective processes,emotion,feeling,folk concepts emotion,measurement emotion,scientific concepts emotion}, number = {4}, pages = {695--729}, publisher = {Sage Publications}, title = {{What are emotions? 
And how can they be measured?}}, url = {http://ssi.sagepub.com/cgi/doi/10.1177/0539018405058216}, volume = {44}, year = {2005} } @article{Segal2011, author = {Segal, Elizabeth}, doi = {10.1080/01488376.2011.564040}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Segal - 2011 - Social Empathy A Model Built on Empathy, Contextual Understanding, and Social Responsibility That Promotes Social Justice.pdf:pdf}, issn = {0148-8376}, journal = {Journal of Social Service Research}, keywords = {a dedication to justice,a nation that proclaims,and social well-being and,civic involvement,empathy,scapegoating,social empathy,social responsibility,the united states is}, month = may, number = {3}, pages = {266--277}, title = {{Social Empathy: A Model Built on Empathy, Contextual Understanding, and Social Responsibility That Promotes Social Justice}}, url = {http://www.informaworld.com/openurl?genre=article\&doi=10.1080/01488376.2011.564040\&magic=crossref||D404A21C5BB053405B1A640AFFD44AE3}, volume = {37}, year = {2011} } @book{Wyatt2008, address = {Mill Valley, CA}, author = {Wyatt, Randall C. and Seid, Erika L.}, publisher = {Psychotherapy.net}, title = {{Instructor's Manual for Motivational Interviewing from the Series Brief Therapy for Addictions}}, year = {2008} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Campbell1994, abstract = {OBJECTIVES. To achieve the Healthy People 2000 objectives, public health professionals must develop effective dietary interventions that address psychosocial and behavioral components of change. This study tested the effect of individually computer-tailored messages designed to decrease fat intake and increase fruit and vegetable intake. METHODS. Adult patients from four North Carolina family practices were surveyed at baseline and then randomly assigned to one of two interventions or to a control group. The first intervention consisted of individually computer-tailored nutrition messages; the second consisted of nontailored nutrition information based on the 1990 Dietary Guidelines for Americans. Patients were resurveyed 4 months postintervention. RESULTS. The tailored intervention produced significant decreases in total fat and saturated fat scores compared with those of the control group (P < .05). Total fat was decreased in the tailored group by 23\%, in the nontailored group by 9\%, and in the control group by 3\%. Fruit and vegetable consumption did not increase in any study group. Seventy-three percent of the tailored intervention group recalled receiving a message, compared with 33\% of the nontailored intervention group. CONCLUSIONS. 
Tailored nutrition messages are effective in promoting dietary fat reduction for disease prevention.}, author = {Campbell, M K and DeVellis, B M and Strecher, V J and Ammerman, A S and DeVellis, R F and Sandler, R S}, institution = {Department of Health Behavior, Department of Nutrition, School of Public Health, University of North Carolina, Chapel Hill 27599-7400.}, journal = {American Journal of Public Health}, number = {5}, pages = {783--787}, title = {{Improving dietary behavior: the effectiveness of tailored messages in primary care settings.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1615043\&tool=pmcentrez\&rendertype=abstract}, volume = {84}, year = {1994} } @book{Reeves1996, address = {New York, NY}, author = {Reeves, B. and Nass, C.}, publisher = {University of Chicago Press}, title = {{The Media Equation: How People Treat Computers, Television, and New Media Like Real People and Places}}, year = {1996} } @inproceedings{Lee2009, abstract = {During face-to-face conversation, the speaker's head is continually in motion. These movements serve a variety of important communicative functions, and may also be influenced by our emotions. The goal for this work is to build a domain-independent model of speaker's head movements and investigate the effect of using affective information during the learning process. Once the model is learned, it can later be used to generate head movements for virtual agents. In this paper, we describe our machine-learning approach to predict speaker's head nods using an annotated corpora of face-to-face human interaction and emotion labels generated by an affect recognition model. We describe the feature selection process, training process, and the comparison of results of the learned models under varying conditions. 
The results show that using affective information can help predict head nods better than when no affective information is used.}, author = {Lee, Jina and Prendinger, Helmut and Neviarouskaya, Alena and Marsella, Stacy}, booktitle = {3rd International Conference on Affective Computing and Intelligent Interaction (ACII 2009)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee et al. - 2009 - Learning models of speaker head nods with affective information.pdf:pdf}, isbn = {9781424447992}, pages = {1--6}, publisher = {IEEE}, title = {{Learning models of speaker head nods with affective information}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5349543}, year = {2009} } @misc{University2013, author = {University, Illinois}, title = {{Strengths and Weakness of Online Education}}, url = {http://www.ion.uillinois.edu/resources/tutorials/overview/strengthAndWeak.asp}, urldate = {2013-12-17}, year = {2013} } @article{Wu2008, author = {Wu, Siew-Rong}, doi = {10.1109/DIGITEL.2008.27}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wu - 2008 - Humor and Empathy Developing Students' Empathy through Teaching Robots to Tell English Jokes.pdf:pdf}, isbn = {978-0-7695-3409-1}, journal = {2008 Second IEEE International Conference on Digital Game and Intelligent Toy Enhanced Learning}, pages = {213--214}, publisher = {Ieee}, title = {{Humor and Empathy: Developing Students' Empathy through Teaching Robots to Tell English Jokes}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4700764}, year = {2008} } @article{Zhang2009, abstract = {This paper presents a new anthropometrics-based method for generating realistic, controllable face models. Our method establishes an intuitive and efficient interface to facilitate procedures for interactive 3D face modeling and editing. 
It takes 3D face scans as examples in order to exploit the variations presented in the real faces of individuals. The system automatically learns a model prior from the data-sets of example meshes of facial features using principal component analysis (PCA) and uses it to regulate the naturalness of synthesized faces. For each facial feature, we compute a set of anthropometric measurements to parameterize the example meshes into a measurement space. Using PCA coefficients as a compact shape representation, we formulate the face modeling problem in a scattered data interpolation framework which takes the user-specified anthropometric parameters as input. Solving the interpolation problem in a reduced subspace allows us to generate a natural face shape that satisfies the user-specified constraints. At runtime, the new face shape can be generated at an interactive rate.We demonstrate the utility of our method by presenting several applications, including analysis of facial features of subjects in different race groups, facial feature transfer, and adapting face models to a particular population group.}, author = {Zhang, Yu and Prakash, Edmond C.}, doi = {10.1155/2009/573924}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zhang, Prakash - 2009 - Face to Face Anthropometry-Based Interactive Face Shape Modeling Using Model Priors.pdf:pdf}, issn = {1687-7047}, journal = {International Journal of Computer Games Technology}, pages = {1--15}, title = {{Face to Face: Anthropometry-Based Interactive Face Shape Modeling Using Model Priors}}, url = {http://www.hindawi.com/journals/ijcgt/2009/573924/}, volume = {2009}, year = {2009} } @article{Bryant1982, abstract = {56 1st, 115 4th, and 87 7th graders were administered a newly devised index of empathy partly based on A. Mehrabian and N. Epstein's (see record 1973-23075-001) measure. 
Item means, item-total correlations, testretest reliabilities, correlations of empathy with aggressiveness and acceptance of individual differences, and correlations with other existing measures of empathy as well as to social desirability response set and reading achievement formed the basis of internal, discriminant, convergent, and general construct validation. The measure demonstrated satisfactory reliability and preliminary construct validity. The study of developmental aspects of empathic arousal toward peers of different sexes is indicated. (38 ref) (PsycINFO Database Record (c) 2010 APA, all rights reserved)}, author = {Bryant, Brenda K}, doi = {10.2307/1128984}, issn = {00093920}, journal = {Child Development}, number = {2}, pages = {413--425}, publisher = {Blackwell Publishing on behalf of the Society for Research in Child Development}, title = {{An Index of Empathy for Children and Adolescents}}, volume = {53}, year = {1982} } @article{Axelson1967, author = {Axelson, John a.}, doi = {10.1002/j.1556-6978.1967.tb00937.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Axelson - 1967 - The Relationship of Counselor Candidates' Empathic Perception and Rapport in Small Group Interaction.pdf:pdf}, issn = {00110035}, journal = {Counselor Education and Supervision}, month = jun, number = {4}, pages = {287--292}, title = {{The Relationship of Counselor Candidates' Empathic Perception and Rapport in Small Group Interaction}}, url = {http://doi.wiley.com/10.1002/j.1556-6978.1967.tb00937.x}, volume = {6}, year = {1967} } @book{Katz1985, address = {Dubuque, Iowa}, author = {Katz, Neil H. 
and Lawyer, John W.}, publisher = {Kendall Hunt}, title = {{Communication and conflict resolution skills}}, year = {1985} } @article{Cooper2003, abstract = {BACKGROUND: African-American patients who visit physicians of the same race rate their medical visits as more satisfying and participatory than do those who see physicians of other races. Little research has investigated the communication process in race-concordant and race-discordant medical visits. OBJECTIVES: To compare patient-physician communication in race-concordant and race-discordant visits and examine whether communication behaviors explain differences in patient ratings of satisfaction and participatory decision making. DESIGN: Cohort study with follow-up using previsit and postvisit surveys and audiotape analysis. SETTING: 16 urban primary care practices. PATIENTS: 252 adults (142 African-American patients and 110 white patients) receiving care from 31 physicians (of whom 18 were African-American and 13 were white). MEASUREMENTS: Audiotape measures of patient-centeredness, patient ratings of physicians' participatory decision-making styles, and overall satisfaction. RESULTS: Race-concordant visits were longer (2.15 minutes 95\% CI, 0.60 to 3.71) and had higher ratings of patient positive affect (0.55 point, 95\% CI, 0.04 to 1.05) compared with race-discordant visits. Patients in race-concordant visits were more satisfied and rated their physicians as more participatory (8.42 points 95\% CI, 3.23 to 13.60). Audiotape measures of patient-centered communication behaviors did not explain differences in participatory decision making or satisfaction between race-concordant and race-discordant visits. CONCLUSIONS: Race-concordant visits are longer and characterized by more patient positive affect. Previous studies link similar communication findings to continuity of care. 
The association between race concordance and higher patient ratings of care is independent of patient-centered communication, suggesting that other factors, such as patient and physician attitudes, may mediate the relationship. Until more evidence is available regarding the mechanisms of this relationship and the effectiveness of intercultural communication skills programs, increasing ethnic diversity among physicians may be the most direct strategy to improve health care experiences for members of ethnic minority groups.}, author = {Cooper, Lisa A and Roter, Debra L and Johnson, Rachel L and Ford, Daniel E and Steinwachs, Donald M and Powe, Neil R}, institution = {Johns Hopkins University School of Medicine and the Welch Center for Prevention, Epidemiology, and Clinical Research, Johns Hopkins University, Baltimore, Maryland 21205-2223, USA. lisa.cooper@jhmi.edu}, journal = {Annals of Internal Medicine}, keywords = {empirical approach,professional patient relationship}, number = {11}, pages = {907--915}, pmid = {14644893}, title = {{Patient-centered communication, ratings of care, and concordance of patient and physician race.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/14644893}, volume = {139}, year = {2003} } @article{Spencer-Smith2001, author = {Spencer-Smith, J. and Wild, H. and Innes-Ker, A. H. and Townsend, J. and Duffy, C. and Edwards, C. and Paik, J. 
W.}, doi = {10.3758/BF03195356}, journal = {Behavior Research Methods, Instruments and Computers}, pages = {115--123}, title = {{Making faces: Creating three dimensional parame- terized models of facial expression}}, volume = {33}, year = {2001} } @inproceedings{Kipp2001, author = {Kipp, Michael}, booktitle = {Proceedings of the 7th European Conference on Speech Communication and Technology (Eurospeech)}, pages = {1367--1370}, title = {{Anvil - A Generic Annotation Tool for Multimodal Dialogue}}, year = {2001} } @book{Prendinger2004, author = {Prendinger, Helmut and Ishizuka, M.}, editor = {Prendinger, Helmut and Ishizuka, M.}, isbn = {9783642056550}, publisher = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. K}, title = {{Life-Like Characters. Cognitive Technologies}}, year = {2004} } @misc{Shaw1997, abstract = {Methods and apparatuses described herein automate and confer additive properties to morphs (modification of a starting graphical image to a destination graphical image). The enhanced automated additive morphs created by this invention extend the currently limited scope of animation techniques, creating: moving morphs, where characters can speak, move, and emote during the morphing process; parametric character creation, where features can be sequentially added to a character to create a wide variety of resulting characters; behavioral transference, where character behavior can be automatically transferred to newly created characters, and behavioral layering whereby sequential behavior patterns can be concurrently transferred or imparted to a character. The present invention allows an animator to create, animate, control and transform two and three dimensional images instantaneously and fluidly. The invention provides a superior solution at significantly less cost which extends the range and properties of existing state of the art animation.}, author = {Shaw, Christopher D. 
and Wilson, Orion}, title = {{Methods And Apparatuses For Controlling Transformation Of Two And Three-Dimensional Images}}, year = {1997} } @inproceedings{Chertoff2010, abstract = {We present the development and evaluation of the Virtual Experience Test (VET). The VET is a survey instrument used to measure holistic virtual environment experiences based upon the five dimensions of experiential design: sensory, cognitive, affective, active, and relational. Experiential Design (ED) is a holistic approach to enhance presence in virtual environments that goes beyond existing presence theory (i.e. a focus on the sensory aspects of VE experiences) to include affective and cognitive factors. To evaluate the VET, 62 participants played the commercial video game Mirror's Edge. After gameplay both the VET and the ITC-Sense of Presence Inventory (ITC-SOPI) were administered. A principal component analysis was performed on the VET and it was determined that the actual question clustering coincided with the proposed dimensions of experiential design. Furthermore, scores from the VET were shown to have a significant relationship with presence scores on the ITC-SOPI. The results of this research produced a validated measure of holistic experience that could be used to evaluate virtual environments. Furthermore, our experiment indicates that virtual environments utilizing holistic designs can result in significantly higher presence.}, address = {Waltham, Massachusetts, USA}, author = {Chertoff, Dustin B. 
and Goldiez, Brian and LaViola, Joseph J.}, booktitle = {IEEE Virtual Reality Conference (VR)}, doi = {10.1109/VR.2010.5444804}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chertoff, Goldiez, LaViola - 2010 - Virtual Experience Test A virtual environment evaluation questionnaire.pdf:pdf}, isbn = {978-1-4244-6237-7}, month = mar, pages = {103--110}, publisher = {IEEE}, title = {{Virtual Experience Test: A virtual environment evaluation questionnaire}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5444804}, year = {2010} } @article{Picard2001, author = {Picard, Rosalind W. and Vyzas, E. and Healey, J.}, doi = {10.1109/34.954607}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Picard, Vyzas, Healey - 2001 - Toward machine emotional intelligence analysis of affective physiological state.pdf:pdf}, issn = {01628828}, journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, number = {10}, pages = {1175--1191}, title = {{Toward machine emotional intelligence: analysis of affective physiological state}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=954607}, volume = {23}, year = {2001} } @article{Mehrabian1967a, abstract = {DEALT WITH INCONSISTENT COMMUNICATION OF ATTITUDE IN 2 COMPONENTS OF A MESSAGE. POSITIVE, NEUTRAL, OR NEGATIVE ATTITUDES COMMUNICATED IN SINGLE-WORD CONTENTS WERE EACH COMBINED WITH 3 DEGREES OF ATTITUDE COMMUNICATED IN TONE OF VOICE. IT WAS FOUND, CONSISTENT WITH THE PROPOSED HYPOTHESIS, THAT THE VARIABILITY OF INFERENCES ABOUT COMMUNICATOR ATTITUDE ON THE BASIS OF INFORMATION AVAILABLE IN CONTENT AND TONE COMBINED IS MAINLY CONTRIBUTED BY VARIATIONS IN TONE ALONE. FOR EXAMPLE, WHEN THE ATTITUDE COMMUNICATED IN CONTENT CONTRADICTED THE ATTITUDE COMMUNICATED BY A NEGATIVE TONE, THE TOTAL MESSAGE WAS JUDGED AS COMMUNICATING A NEGATIVE ATTITUDE. 
THE LIMITATIONS OF THE FINDINGS, AS WELL AS THEIR IMPLICATIONS FOR THE DOUBLE-BLIND THEORY OF SCHIZOPHRENIA, ARE DISCUSSED. (PsycINFO Database Record (c) 2006 APA, all rights reserved)}, author = {Mehrabian, Albert and Wiener, M}, journal = {Journal of Personality and Social Psychology}, keywords = {attitude,communication,cues,humans,schizophrenic psychology,voice}, number = {1}, pages = {109--114}, pmid = {6032751}, title = {{Decoding of inconsistent communications.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6032751}, volume = {6}, year = {1967} } @article{Ekman1974, author = {Ekman, Paul and Freisen, Wallace V.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ekman, Freisen - 1974 - Detecting Deception From The Body Or Face.pdf:pdf}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {288--298}, title = {{Detecting Deception From The Body Or Face}}, volume = {29}, year = {1974} } @book{Miller2002, abstract = {This bestselling work has introduced hundreds of thousands of professionals and students to motivational interviewing (MI), a proven approach to helping people overcome ambivalence that gets in the way of change. William R. Miller and Stephen Rollnick explain current thinking on the process of behavior change, present the principles of MI, and provide detailed guidelines for putting it into practice. Case examples illustrate key points and demonstrate the benefits of MI in addictions treatment and other clinical contexts. The authors also discuss the process of learning MI. Chapters contributed by other leading experts address such special topics as MI and the stages-of-change model; applications in medical, public health, and criminal justice settings; and using the approach with groups, couples, and adolescents.}, address = {New York}, author = {Miller, William R. 
and Rollnick, Stephen}, booktitle = {Zeitschrift f\"{u}r Klinische Psychologie und Psychotherapie}, chapter = {428}, doi = {10.1026/1616-3443.34.1.66}, edition = {2nd}, isbn = {1572305630}, issn = {16163443}, number = {1}, pages = {428}, pmid = {12538308}, publisher = {Guilford Press}, title = {{Motivational Interviewing: Preparing People for Change}}, url = {http://books.google.com/books?id=r\_CuyHwdz7EC\&pgis=1}, volume = {2nd}, year = {2002} } @article{Hatfield1994, abstract = {(From the introduction) The focus in this text is on rudimentary or primitive emotional contagion-that which is relatively automatic, unintentional, uncontrollable, and largely inaccessible to conversant awareness. This is defined as the tendency to automatically mimic and synchronize facial expressions, vocalizations, postures, and movements with those of another person, and consequently, to converge emotionally. Emotional contagion may well be important in personal relationships: It fosters behavioral synchrony and the tracking of the feelings of others moment to moment, even when individuals are not explicitly attending to this information. 
(PsycINFO Database Record (c) 2008 APA}, author = {Hatfield, Elaine and Cacioppo, John T and Rapson, Richard L}, doi = {10.1111/1467-8721.ep10770953}, editor = {Craighead, W E and Nemeroff, C B}, isbn = {0521449480}, issn = {09637214}, journal = {Current Directions in Psychological Science}, number = {3}, pages = {96--99}, pmid = {20544488}, publisher = {Cambridge University Press}, title = {{Emotional contagion}}, url = {http://dx.doi.org/10.1111/j.1467-8721.1993.tb00114.x}, volume = {2}, year = {1994} } @article{LeonA.1995, author = {Kappelman, Leon A.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kappelman - 1995 - Measuring User Involvement A Diffusion of Innovation Perspective.pdf:pdf}, journal = {Data Base Advances}, number = {2 \& 3}, pages = {65--86}, title = {{Measuring User Involvement: A Diffusion of Innovation Perspective}}, volume = {26}, year = {1995} } @article{Wilmer1968, author = {Wilmer, H. A.}, journal = {British Journal of Medical Psychology}, pages = {243--248}, title = {{The doctor-patient relationship and issues of pity, sympathy and empathy}}, volume = {41}, year = {1968} } @article{Brave2005, abstract = {Embodied computer agents are becoming an increasingly popular human-computer interaction technique. Often, these agents are programmed with the capacity for emotional expression. This paper investigates the psychological effects of emotion in agents upon users. In particular, two types of emotion were evaluated: self-oriented emotion and other-oriented, empathic emotion. In a 2 (self-oriented emotion: absent vs. present) by 2 (empathic emotion: absent vs. present) by 2 (gender dyad: male vs. female) between-subjects experiment (N = 96), empathic emotion was found to lead to more positive ratings of the agent by users, including greater likeability and trustworthiness, as well as greater perceived caring and felt support. No such effect was found for the presence of self-oriented emotion. 
Implications for the design of embodied computer agents are discussed and directions for future research suggested.}, author = {Brave, Scott and Nass, Clifford and Hutchinson, Kevin}, doi = {10.1016/j.ijhcs.2004.11.002}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Brave, Nass, Hutchinson - 2005 - Computers that care investigating the effects of orientation of emotion exhibited by an embodied comput.pdf:pdf}, issn = {10715819}, journal = {International Journal of Human-Computer Studies - Special issue: Subtle expressivity for characters and robots}, keywords = {affective computing,characters,embodied agents,emotion,empathy,social interfaces}, month = feb, number = {2}, pages = {161--178}, title = {{Computers that care: investigating the effects of orientation of emotion exhibited by an embodied computer agent}}, url = {http://www.sciencedirect.com/science/article/pii/S1071581904001284}, volume = {62}, year = {2005} } @inproceedings{Jerritta2011, author = {Jerritta, S and Murugappan, M and Nagarajan, R and Wan, Khairunizam}, booktitle = {IEEE 7th International Colloquium on Signal Processing and its Applications}, doi = {10.1109/CSPA.2011.5759912}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jerritta et al. - 2011 - Physiological signals based human emotion Recognition a review.pdf:pdf}, isbn = {978-1-61284-414-5}, keywords = {emotion,inducement stimuli,physiological signals,signal processing techniques}, month = mar, pages = {410--415}, publisher = {IEEE}, title = {{Physiological signals based human emotion Recognition: a review}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5759912}, year = {2011} } @article{Rosenberg1995, author = {Rosenberg, E. 
and Ekman, Paul}, journal = {Motivation and Emotion}, number = {2}, pages = {111--138}, title = {{Conceptual and methodological issues in the judgment of facial expressions of emotion}}, volume = {19}, year = {1995} } @incollection{Wispe1987, author = {Wisp\'{e}, L}, booktitle = {Empathy and its development}, chapter = {2}, editor = {Einsenberg, Nancy and Strayer, Janet}, isbn = {0521326095}, issn = {05213260}, pages = {17--37}, publisher = {Cambridge University Press}, title = {{History of the concept of empathy}}, year = {1987} } @article{VanderSchalk2011a, author = {van der Schalk, J. and Hawk, S. T. and Fischer, A. H. and Doosje, B. J.}, doi = {10.1037/a0023853}, journal = {Emotion}, pages = {907--920}, title = {{Validation of the Amsterdam Dynamic Facial Expression Set (ADFES)}}, volume = {11}, year = {2011} } @inproceedings{Borutta2009, abstract = {Emotional expressions are considered to be important for robotic and virtual agents to improve nonverbal communication in human-machine-interaction. In this paper we focus on a subset of emotional expressions, namely the smile and it's variations. The proposed concept for generating artificial smile sequences is based on the system-theoretic psychological model of smiling, which is based on the Zurich Model of Social Motivation. The model and seven different types of smiles are introduced and it is presented how to integrate this model in a virtual agent. The evaluation of the generated facial expressions shows that the seven types of smiles are distinguishable from each other and can be classified according to given categories.}, address = {Toyama, Japan}, author = {Borutta, Isabell and Sosnowski, Stefan and Zehetleitner, Michael}, booktitle = {The 18th IEEE International Symposium on Robot and Human Interactive Communication, 2009. 
RO-MAN 2009.}, doi = {10.1109/ROMAN.2009.5326255}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Borutta, Sosnowski, Zehetleitner - 2009 - Generating artificial smile variations based on a psychological system-theoretic approach.pdf:pdf}, pages = {245 -- 250}, title = {{Generating artificial smile variations based on a psychological system-theoretic approach}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5326255}, year = {2009} } @incollection{Sebe2005a, author = {Sebe, Nicu and Cohen, Ira and Huang, Thomas S.}, booktitle = {Handbook of Pattern Recognition and Computer Vision}, chapter = {1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sebe, Cohen, Huang - 2005 - Multimodal emotion recognition.pdf:pdf}, pages = {981--256}, publisher = {Citeseer}, title = {{Multimodal emotion recognition}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.110.1129\&rep=rep1\&type=pdf}, year = {2005} } @article{Chuang2004, author = {Chuang, ZJ}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chuang - 2004 - Multi-modal emotion recognition from speech and text.pdf:pdf}, journal = {International Journal of Computational}, number = {2}, pages = {45--62}, title = {{Multi-modal emotion recognition from speech and text}}, url = {http://www.mendeley.com/research/multimodal-emotion-recognition-from-speech-and-text/}, volume = {9}, year = {2004} } @incollection{Miller1986, abstract = {The matching hypothesis proposes that clients problem-drinkers who are matched to appropriate treatments will show greater improvement than will those who are unmatched or mismatched undifferentiated treatment: the status quo research strategies predictor studies differential studies problem severity cognitive style neuropsychological status self-esteem social stability client choice (PsycINFO Database Record (c) 2006 APA, all rights 
reserved)}, author = {Miller, William R. and Hester, Reid K}, booktitle = {Treating addictive behaviors Processes of change}, editor = {Miller, William R and Heather, Nick}, isbn = {0306422484}, pages = {175--203}, publisher = {Plenum Press}, title = {{Matching problem drinkers with optimal treatments.}}, year = {1986} } @book{Stokes2003, author = {{Maura E. Stokes} and Davis, Charles S. and Koch, Gary G.}, edition = {2nd}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Maura E. Stokes, Davis, Koch - 2003 - Categorical Data Analysis Using the SAS System.pdf:pdf}, isbn = {1-58025-710-0}, pages = {1--86}, publisher = {SAS Institute and Wiley}, title = {{Categorical Data Analysis Using the SAS System}}, year = {2003} } @misc{Tsui1985, abstract = {The extent to which relatively unassimilated Asian clients can utilize traditional psychotherapy is likely to depend upon the ability of therapists to understand cultural differences and to adapt their clinical styles accordingly. Common errors made by non-Asian therapists attempting to engage Asians in psychotherapy are identified and appropriate therapeutic strategies are suggested.}, author = {Tsui, P and Schultz, G L}, booktitle = {The American journal of orthopsychiatry}, number = {4}, pages = {561--569}, pmid = {4073227}, title = {{Failure of rapport: why psychotherapeutic engagement fails in the treatment of Asian clients.}}, volume = {55}, year = {1985} } @article{Szymanski2007, abstract = {The first step towards creating avatars with human-like artificial minds is to give them human-like memory structures with an access to general knowledge about the world. This type of knowledge is stored in semantic memory. 
Although many approaches to modeling of semantic memories have been proposed they are not very useful in real life applications because they lack knowledge comparable to the common sense that humans have, and they cannot be implemented in a computationally efficient way. The most drastic simplification of semantic memory leading to the simplest knowledge representation that is sufficient for many applications is based on the Concept Description Vectors (CDVs) that store, for each concept, an information whether a given property is applicable to this concept or not. Unfortunately even such simple information about real objects or concepts is not available. Experiments with automatic creation of concept description vectors from various sources, including ontologies, dictionaries, encyclopedias and unstructured text sources are described. Haptek-based talking head that has an access to this memory has been created as an example of a humanized interface (HIT) that can interact with web pages and exchange information in a natural way. A few examples of applications of an avatar with semantic memory are given, including the twenty questions game and automatic creation of word puzzles.}, author = {Szymanski, Julian and Sarnatowicz, Tomasz and Duch, Wlodzislaw}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Szymanski, Sarnatowicz, Duch - 2007 - Towards Avatars with Artificial Minds Role of Semantic Memory.pdf:pdf}, journal = {Journal of Ubiquitous Computing and Intelligence}, keywords = {avatars,cyberspace,dialogue systems,natural language processing,semantic memory,word games}, title = {{Towards Avatars with Artificial Minds : Role of Semantic Memory}}, url = {http://cogprints.org/5357/}, year = {2007} } @book{Hogg2009, author = {Hogg, Robert V. 
and Tanis, Elliot}, edition = {8}, isbn = {0321656717}, publisher = {Pearson}, title = {{Probability and Statistical Inference}}, year = {2009} } @book{Frijda1986, address = {New York}, author = {Frijda, Nico H.}, isbn = {9780521316002}, pages = {544}, publisher = {Cambridge University Press}, title = {{The Emotions}}, year = {1986} } @phdthesis{Axelrod2009, author = {Axelrod, Lesley Ann}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Axelrod - 2009 - Emotional Recognition in Computing.pdf:pdf}, number = {September}, school = {Brunel University}, title = {{Emotional Recognition in Computing}}, type = {PhD Thesis}, year = {2009} } @book{Fussell2002, author = {Fussell, Susan R.}, editor = {Fussell, Susan R.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fussell - 2002 - The verbal communication of emotions Interdisciplinary perspectives.pdf:pdf}, isbn = {9780805836905}, pages = {294}, publisher = {Lawrence Erlbaum Associates}, title = {{The verbal communication of emotions: Interdisciplinary perspectives}}, url = {http://books.google.com/books?id=MHea6DYYfEgC}, year = {2002} } @article{Lazarus1991, abstract = {The 2 main tasks of this article are 1st, to examine what a theory of emotion must do and basic issues that it must address. These include definitional issues, whether or not physiological activity should be a defining attribute, categorical versus dimensional strategies, the reconciliation of biological universals with sociocultural sources of variability, and a classification of the emotions. The 2nd main task is to apply an analysis of appraisal patterns and the core relational themes that they produce to a number of commonly identified emotions. Anger, anxiety, sadness, and pride (to include 1 positive emotion) are used as illustrations. The purpose is to show the capability of a cognitive-motivational-relational theory to explain and predict the emotions. 
The role of coping in emotion is also discussed, and the article ends with a response to criticisms of a phenomenological, folk-theory outlook.}, author = {Lazarus, Richard S.}, doi = {10.1037/0003-066X.46.8.819}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lazarus - 1991 - Progress on a cognitive-motivational-relational theory of emotion.pdf:pdf}, issn = {0003-066X}, journal = {The American psychologist}, keywords = {Cognition,Emotions,Humans,Interpersonal Relations,Motivation,Psychological Theory}, month = aug, number = {8}, pages = {819--834}, pmid = {1928936}, title = {{Progress on a cognitive-motivational-relational theory of emotion}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1928936}, volume = {46}, year = {1991} } @book{Greenson1967, address = {New York}, author = {Greenson, Ralph R.}, pages = {367}, publisher = {International University Press}, title = {{The techniques and practice of psychoanalysis}}, year = {1967} } @inproceedings{Lisetti2008a, author = {Lisetti, Christine L. and Wagner, Eric}, booktitle = {Proceedings of the AAAI Spring Symposium on Emotion, Personality and Social Behavior}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lisetti, Wagner - 2008 - Mental Health Promotion with Animated Characters Exploring Issues and Potential.pdf:pdf}, keywords = {Technical Report SS-08-04}, title = {{Mental Health Promotion with Animated Characters: Exploring Issues and Potential}}, year = {2008} } @article{Straalen2009, author = {van Straalen, Bart and Heylen, Dirk and Theune, Mari\"{e}t}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Straalen, Heylen, Theune - 2009 - Enhancing Embodied Conversational Agents with Social and Emotional Capabilities.pdf:pdf}, journal = {Agents for Games and Simulations}, keywords = {bad news conversations,embodied conversational agents,empathy,social agents,tutoring}, pages = {95--106},
title = {{Enhancing Embodied Conversational Agents with Social and Emotional Capabilities}}, url = {http://www.springerlink.com/index/3612181747K5L570.pdf}, year = {2009} } @article{Wolf2010, abstract = {Computer Vision and Biometrics systems have demonstrated considerable improvement in recognizing and verifying faces in digital images. Still, recognizing faces appearing in unconstrained, natural conditions remains a challenging task. In this paper we present a face-image, pair-matching approach primarily developed and tested on the "Labeled Faces in the Wild" (LFW) benchmark that reflect the challenges of face recognition from unconstrained images. The approach we propose makes the following contributions. (a) We present a family of novel face-image descriptors designed to capture statistics of local patch similarities. (b) We demonstrate how semi-labeled background samples may be used to better evaluate image similarities. To this end we describe a number of novel, effective similarity measures. (c) We show how labeled background samples, when available, may further improve classification performance, by employing a unique pair-matching pipeline. We present state-of-the-art results on the LFW pair-matching benchmarks. In addition, we show our system to be well suited for multi-label face classification (recognition) problems. 
We perform recognition tests on LFW images as well as images from the laboratory controlled multiPIE database.}, author = {Wolf, Lior and Hassner, Tal and Taigman, Yaniv}, doi = {10.1109/TPAMI.2010.230}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wolf, Hassner, Taigman - 2010 - Effective Unconstrained Face Recognition by Combining Multiple Descriptors and Learned Background Statis.pdf:pdf}, issn = {1939-3539}, journal = {IEEE transactions on pattern analysis and machine intelligence}, month = dec, pages = {1--13}, pmid = {21173442}, title = {{Effective Unconstrained Face Recognition by Combining Multiple Descriptors and Learned Background Statistics.}}, volume = {33}, year = {2010} } @inproceedings{Dinda2007, abstract = {Experimental computer systems research typically ignores the end-user, modeling him, if at all, in overly simple ways. We argue that this (1) results in inadequate performance evaluation of the systems, and (2) ignores opportunities. We summarize our experiences with (a) directly evaluating user satisfaction and (b) incorporating user feedback in different areas of client/server computing, and use our experiences to motivate principles for that domain. Specifically, we report on user studies to measure user satisfaction with resource borrowing and with different clock frequencies in desktop computing, the development and evaluation of user interfaces to integrate user feedback into scheduling and clock frequency decisions in this context, and results in predicting user action and system response in a remote display system. We also present initial results on extending our work to user control of scheduling and mapping of virtual machines in a virtualization-based distributed computing environment.
We then generalize (a) and (b) as recommendations for incorporating the user into experimental computer systems research.}, address = {New York, USA}, author = {Dinda, Peter A and Dick, Robert P and Rossoff, Samuel}, booktitle = {ExpCS '07 Proceedings of the 2007 workshop on Experimental computer science ACM}, doi = {10.1145/1281700.1281710}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Dinda, Dick, Rossoff - 2007 - The User In Experimental Computer Systems Research.pdf:pdf}, isbn = {9781595937513}, keywords = {Autonomic Systems,Design,Experimentation,Human Directed Adaptation,Human Factors,Measurement,Performance,Speculative Remote Display,User Comfort With Resource Borrowing,User-driven Power Management,User-driven Scheduling}, number = {June}, pages = {1--12}, title = {{The User In Experimental Computer Systems Research}}, url = {http://dl.acm.org/citation.cfm?id=1281710}, year = {2007} } @inproceedings{Partala2000, abstract = {This paper investigated in two experiments pupillary responses to emotionally provocative sound stimuli. In experiment one, 30 subjects' pupillary responses were measured while listening to 10 negatively and 10 positively arousing sounds, and 10 emotionally neutral sounds. In addition, the subjects rated their subjective experiences to these stimuli. The results showed that the pupil size was significantly larger after highly arousing positive and negative stimuli than after neutral stimuli with medium arousal. In experiment two, the contents of the stimuli were more controlled than in experiment one. 22 subjects' pupillary responses were measured while listening to four negatively and four positively arousing sounds, and four emotionally neutral sounds. The results showed that the pupil size was significantly larger during negative highly arousing stimuli than during moderately arousing positive stimuli. 
The pupil size was also significantly larger after highly arousing negative stimuli than after moderately arousing neutral and positive stimuli. The results of the two experiments suggest that pupil size discriminates during and after different kinds of emotional stimuli. Thus, the measurement of pupil size variation may be a potentially useful computer input signal, for example, for affective computing.}, author = {Partala, Timo and Jokiniemi, Maria and Surakka, Veikko}, booktitle = {Proceedings of the 2000 Symposium on Eye Tracking Research \& Applications}, doi = {10.1145/355017.355042}, isbn = {1581132808}, pages = {123--129}, publisher = {ACM}, title = {{Pupillary responses to emotionally provocative stimuli}}, url = {http://dl.acm.org/citation.cfm?id=355042}, year = {2000} } @article{Mendelson1999, author = {Mendelson, M. J. and Aboud, F. E.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mendelson, Aboud - 1999 - Measuring friendship quality in late adolescents and young adults McGill Friendship Questionnaires.pdf:pdf}, journal = {Canadian Journal of Behavioural Science/Revue canadienne des sciences du comportement}, number = {2}, pages = {130}, publisher = {Canadian Psychological Association}, title = {{Measuring friendship quality in late adolescents and young adults: McGill Friendship Questionnaires.}}, url = {http://psycnet.apa.org/journals/cbs/31/2/130/}, volume = {31}, year = {1999} } @article{CoanJr1984, abstract = {Rapport is a characteristic of a relationship if there is a high degree of empathy, attention, and shared understanding and expectations. Rapport should be enhanced when the salesperson and the customer are comembers of the same group. Also, rapport should aid persuasion and help increase consumer satisfaction.
Both observational and paper-and-pencil techniques can be used to measure rapport.}, author = {{Coan Jr}, G.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Coan Jr - 1984 - RAPPORT DEFINITION AND DIMENSIONS.pdf:pdf}, journal = {Advances in Consumer Research}, pages = {333--336}, title = {{RAPPORT: DEFINITION AND DIMENSIONS}}, url = {http://www.acrwebsite.org/volumes/display.asp?id=6269}, volume = {11}, year = {1984} } @article{Larimer2007, abstract = {The current study was designed to evaluate the efficacy of a mailed feedback and tips intervention as a universal prevention strategy for college drinking. Participants (N = 1,488) were randomly assigned to feedback or assessment-only control conditions. Results indicated that the mailed feedback intervention had a preventive effect on drinking rates overall, with participants in the feedback condition consuming less alcohol at follow-up in comparison with controls. In addition, abstainers in the feedback condition were twice as likely to remain abstinent from alcohol at follow-up in comparison with control participants (odds ratio = 2.02), and feedback participants were significantly more likely to refrain from heavy episodic drinking (odds ratio = 1.43). Neither gender nor severity of baseline drinking moderated the efficacy of the intervention in these analyses, but more conservative analyses utilizing last-observation carryforward suggested women and abstainers benefited more from this prevention approach. Protective behaviors mediated intervention efficacy, with participants who received the intervention being more likely to use strategies such as setting limits and alternating alcohol with nonalcoholic beverages. 
Implications of these findings for universal prevention of college drinking are discussed.}, author = {Larimer, Mary E and Lee, Christine M and Kilmer, Jason R and Fabiano, Patricia M and Stark, Christopher B and Geisner, Irene M and Mallett, Kimberly A and Lostutter, Ty W and Cronce, Jessica M and Feeney, Maggie and Neighbors, Clayton}, institution = {Department of Psychiatry and Behavioral Sciences, University of Washington, Seattle, WA 98105, USA. larimer@u.washington.edu}, journal = {Journal of Consulting and Clinical Psychology}, keywords = {adult,alcohol drinking,alcohol drinking epidemiology,alcohol drinking prevention \& control,communication,feedback,female,humans,male,motivation,postal service,students,students statistics \& numerical data,universities}, number = {2}, pages = {285--293}, pmid = {17469886}, publisher = {American Psychological Association. Journals Department, 750 First Street NE, Washington, DC 20002-4242. Tel: 800-374-2721; Tel: 202-336-5510; Fax: 202-336-5502; e-mail: order@apa.org; Web site: http://www.apa.org/publications}, title = {{Personalized mailed feedback for college drinking prevention: a randomized clinical trial.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17469886}, volume = {75}, year = {2007} } @phdthesis{Li2007, author = {Li, Xi}, booktitle = {Interface}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li - 2007 - SPEech Feature Toolbox (SPEFT) Design and Emotional Speech Feature Extraction.pdf:pdf}, school = {Marquette University}, title = {{SPEech Feature Toolbox (SPEFT) Design and Emotional Speech Feature Extraction}}, url = {http://speechlab.eece.mu.edu/johnson/papers/li\_thesis.pdf}, year = {2007} } @article{Fagerstrom1990, author = {Fagerstrom, K O and Heatherton, T F and Kozlowski, L T}, institution = {Department of Psychology, Harvard University.}, journal = {Ear nose throat journal}, keywords = {*adverse 
effects,*diagnosis,*nicotine,*standards,alcoholism,complications,etiology,human,ph,prevention \& control,psyc,questionnaires,smoking,substance related disorders}, number = {11}, pages = {763--765}, pmid = {2276350}, title = {{Nicotine addiction and its assessment.}}, volume = {69}, year = {1990} } @article{Ward2000, abstract = {Back-channel feedback, responses such as uh-uh from a listener, is a pervasive feature of conversation. It has long been thought that the production of back-channel feedback depends to a large extent on the actions of the other conversation partner, not just on the volition of the one who produces them. In particular, prosodic cues from the speaker have long been thought to play a role, but have so far eluded identification. We have earlier suggested that an important prosodic cue involved, in both English and Japanese, is a region of low pitch late in an utterance (Ward, 1996). This paper discusses issues in the definition of back-channel feedback, presents evidence for our claim, surveys other factors which elicit or inhibit back-channel responses, and mentions a few related phenomena and theoretical issues. (C) 2000 Elsevier Science B.V. All rights reserved.}, author = {Ward, Nigel and Tsukahara, Wataru}, doi = {10.1016/S0378-2166(99)00109-5}, issn = {03782166}, journal = {Journal of Pragmatics}, number = {8}, pages = {1177--1207}, publisher = {Elsevier}, title = {{Prosodic features which cue back-channel responses in English and Japanese}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0378216699001095}, volume = {32}, year = {2000} } @article{Shimoda2000, author = {Shimoda, H. and Kunihiro, T. and Yang, D. and Yoshikawa, H.}, doi = {10.1109/IECON.2000.972406}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Shimoda et al. 
- 2000 - Design of affective interface for realizing human-machine empathy.pdf:pdf}, isbn = {0-7803-6456-2}, journal = {2000 26th Annual Conference of the IEEE Industrial Electronics Society. IECON 2000. 2000 IEEE International Conference on Industrial Electronics, Control and Instrumentation. 21st Century Technologies and Industrial Opportunities (Cat. No.00CH37141)}, pages = {2589--2594}, publisher = {Ieee}, title = {{Design of affective interface for realizing human-machine empathy}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=972406}, volume = {4}, year = {2000} } @article{Krumhuber2012, abstract = {In this article, we present FACSGen 2.0, new animation software for creating static and dynamic three-dimensional facial expressions on the basis of the Facial Action Coding System (FACS). FACSGen permits total control over the action units (AUs), which can be animated at all levels of intensity and applied alone or in combination to an infinite number of faces. In two studies, we tested the validity of the software for the AU appearance defined in the FACS manual and the conveyed emotionality of FACSGen expressions. In Experiment 1, four FACS-certified coders evaluated the complete set of 35 single AUs and 54 AU combinations for AU presence or absence, appearance quality, intensity, and asymmetry. In Experiment 2, lay participants performed a recognition task on emotional expressions created with FACSGen software and rated the similarity of expressions displayed by human and FACSGen faces. Results showed good to excellent classification levels for all AUs by the four FACS coders, suggesting that the AUs are valid exemplars of FACS specifications. Lay participants' recognition rates for nine emotions were high, and comparisons of human and FACSGen expressions were very similar. 
The findings demonstrate the effectiveness of the software in producing reliable and emotionally valid expressions, and suggest its application in numerous scientific areas, including perception, emotion, and clinical and neuroscience research. (PsycINFO Database Record (c) 2012 APA, all rights reserved).}, author = {Krumhuber, Eva G and Tamarit, Lucas and Roesch, Etienne B and Scherer, Klaus R.}, doi = {10.1037/a0026632}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Krumhuber et al. - 2012 - FACSGen 2.0 animation software Generating three-dimensional FACS-valid facial expressions for emotion research.pdf:pdf}, issn = {1931-1516}, journal = {Emotion}, keywords = {and recognition of emotions,animation,emotion,expressions,expressive stimuli has contributed,facial action coding system,facial expression,facsgen,knowledge of the perception,last years,much to our,over the,several databases of emotion-specific,the use of facial}, month = jan, number = {2}, pages = {351--363}, pmid = {22251045}, title = {{FACSGen 2.0 animation software: Generating three-dimensional FACS-valid facial expressions for emotion research.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/22251045}, volume = {12}, year = {2012} } @incollection{Cassell1999a, abstract = {This paper addresses the problem of designing conversational agents that exhibit appropriate gaze behavior during dialogues with human users. Previous research on gaze behavior has concentrated on its relationship to turn-taking phenomena 4,5,6. Recent work has incorporated some of these findings into the design of autonomous human-like conversational agents and interactive communicative humanoids 1,14. However, none of this research has examined the relationship between information structure and gaze behavior. 
In this paper we discuss why turn- taking is not an adequate explanation for gaze behavior in conversation and why information structure should be integrated with turn-taking as an explanation for this behavior. We then examine the relationship of gaze behavior to information structure and turn-taking through an empirical analysis of discourse transcripts for several dyadic conversations. A simple algorithm for assigning gaze behavior is proposed on the basis of the findings of this empirical analysis. We describe work in progress on implementing this algorithm in an autonomous conversational humanoid agent with the goal of producing more natural gaze behavior related to propositional content in human- computer conversations.}, author = {Cassell, Justine and Torres, Obed E and Prevost, Scott}, booktitle = {Machine Conversations}, editor = {Wilks, Y}, pages = {143--154}, publisher = {Kluwer}, title = {{Turn Taking vs. Discourse Structure: How Best to Model Multimodal Conversation}}, url = {http://citeseer.ist.psu.edu/cassell98turn.html}, year = {1999} } @incollection{Plutchik1980, address = {New York}, author = {Plutchik, R}, booktitle = {Emotion: Theory, research, and experience}, chapter = {Theories o}, editor = {Plutchik, R and Kellerman, H}, number = {3}, pages = {3--33}, publisher = {Academic Press}, series = {Emotion: Theory, research, and experience: Vol. 1. 
Theories of emotion}, title = {{A general psychoevolutionary theory of emotion}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:A+general+psychoevolutionary+theory+of+emotion\#0}, volume = {1}, year = {1980} } @inproceedings{Wright2008, author = {Wright, Peter and McCarthy, J.}, booktitle = {Proceeding of the twenty-sixth annual SIGCHI conference on Human factors in computing systems}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wright, McCarthy - 2008 - Empathy and experience in HCI.pdf:pdf}, isbn = {9781605580111}, pages = {637--646}, publisher = {ACM}, title = {{Empathy and experience in HCI}}, url = {http://dl.acm.org/citation.cfm?id=1357156}, year = {2008} } @inproceedings{Sourina2011, address = {New York, New York, USA}, author = {Sourina, Olga and Liu, Y. and Nguyen, Minh Khoa}, booktitle = {SIGGRAPH Asia 2011 Posters on - SA '11}, doi = {10.1145/2073304.2073315}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sourina, Liu, Nguyen - 2011 - Emotion-enabled EEG-based interaction.pdf:pdf}, isbn = {9781450311373}, pages = {1}, publisher = {ACM Press}, title = {{Emotion-enabled EEG-based interaction}}, url = {http://dl.acm.org/citation.cfm?doid=2073304.2073315}, year = {2011} } @article{Ghanem2005, abstract = {BACKGROUND: Audio computer assisted self interview (ACASI) may minimise social desirability bias in the ascertainment of sensitive behaviours. The aim of this study was to describe the difference in reporting risk behaviour in ACASI compared to a face to face interview (FFI) among public sexually transmitted diseases (STD) clinic attendees. STUDY DESIGN: Randomly selected patients attending a public STD clinic in Baltimore, Maryland, sequentially took an ACASI formatted risk behaviour assessment followed by an FFI conducted by a single clinician, with both interview modalities surveying sexual and drug use behaviours. 
Binary responses were compared using the sign test, and categorical responses were compared using the Wilcoxon signed rank test to account for repeated measures. RESULTS: 671 (52\% men, mean age 30 years, 95\% African American) of 795 clinic attendees screened consented to participate. Subjects affirmed sensitive sexual behaviours such as same sex contact (p = 0.012), receptive rectal sexual exposure (p < 0.001), orogenital contact (p < 0.001), and a greater number of sex partners in the past month (p < 0.001) more frequently with ACASI than with an FFI. However, there were no differences in participant responses to questions on use of illicit drugs or needle sharing. CONCLUSIONS: Among STD clinic patients, reporting of sensitive sexual risk behaviours to clinicians was much more susceptible to social desirability bias than was reporting of illegal drug use behaviours. In STD clinics where screening of sexual risk is an essential component of STD prevention, the use of ACASI may be a more reliable assessment method than traditional FFI.}, author = {Ghanem, K G and Hutton, H E and Zenilman, J M and Zimba, R and Erbelding, E J}, doi = {10.1136/sti.2004.013193}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ghanem et al. 
- 2005 - Audio computer assisted self interview and face to face interview modes in assessing response bias among STD clin.pdf:pdf}, issn = {1368-4973}, journal = {Sexually transmitted infections}, keywords = {Adult,Ambulatory Care,Bias (Epidemiology),Computer-Assisted,Computer-Assisted: methods,Diagnosis,Female,Humans,Interviews as Topic,Interviews as Topic: methods,Male,Middle Aged,Questionnaires,Risk-Taking,Sexual Behavior,Sexually Transmitted Diseases,Sexually Transmitted Diseases: diagnosis,Tape Recording,Unsafe Sex}, month = oct, number = {5}, pages = {421--5}, pmid = {16199744}, title = {{Audio computer assisted self interview and face to face interview modes in assessing response bias among STD clinic patients.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1745029\&tool=pmcentrez\&rendertype=abstract}, volume = {81}, year = {2005} } @article{Riek2009, author = {Riek, Laurel D. and Paul, Philip C. and Robinson, Peter}, doi = {10.1007/s12193-009-0028-2}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Riek, Paul, Robinson - 2009 - When my robot smiles at me Enabling human-robot rapport via real-time head gesture mimicry.pdf:pdf}, issn = {1783-7677}, journal = {Journal on Multimodal User Interfaces}, keywords = {19,affective computing,emotionally conveying,empathy,expressions,facial,forms of expressive empathy,human-robot interaction,is known as,of the most basic,one,social robotics,understand what others are}, month = nov, number = {1-2}, pages = {99--108}, title = {{When my robot smiles at me: Enabling human-robot rapport via real-time head gesture mimicry}}, url = {http://www.springerlink.com/index/10.1007/s12193-009-0028-2}, volume = {3}, year = {2009} } @article{Baylor2009, abstract = {Anthropomorphic virtual agents can serve as powerful technological mediators to impact motivational outcomes such as self-efficacy and attitude change. 
Such anthropomorphic agents can be designed as simulated social models in the Bandurian sense, providing social influence as virtual role models. Of particular value is the capacity for designing such agents as optimized social models for a target audience and context. Importantly, the visual presence and appearance of such agents can have a major impact on motivation and affect regardless of the underlying technical sophistication. Empirical results of different instantiations of agent presence and appearance are reviewed for both autonomous virtual agents and avatars that represent a user.}, author = {Baylor, Amy L}, institution = {Department of Educational Psychology and Learning Systems, Florida State University, Tallahassee, FL 32306, USA. abaylor@fsu.edu}, journal = {Philosophical Transactions of the Royal Society of London - Series B: Biological Sciences}, keywords = {humans,models,motivation,psychological,social environment,user computer interface}, number = {1535}, pages = {3559--3565}, publisher = {The Royal Society}, title = {{Promoting motivation with virtual agents and avatars: role of visual presence and appearance}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2781889\&tool=pmcentrez\&rendertype=abstract}, volume = {364}, year = {2009} } @inproceedings{Denef2009, abstract = {This thesis investigates the design of human computer interaction techniques for ubiquitous computing solutions in firefighting.}, address = {Uppsala, Sweden}, author = {Denef, Sebastian}, booktitle = {INTERACT '09 Proceedings of the 12th IFIP TC 13 International Conference on Human-Computer Interaction: Part II}, doi = {10.1007/978-3-642-03658-3\_97}, editor = {Gross, Tom and Gulliksen, Jan and Kotz\'{e}, Paula and Oestreicher, Lars and Palanque, Philippe and Prates, Raquel Oliveira and Winckler, Marco}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Denef - 2009 - Human-Computer Interaction Techniques in 
Firefighting.pdf:pdf}, pages = {864--867}, publisher = {Springer Berlin / Heidelberg}, title = {{Human-Computer Interaction Techniques in Firefighting}}, url = {http://www.springerlink.com/index/n0688783567n3251.pdf http://dl.acm.org/citation.cfm?id=1616339}, year = {2009} } @article{Clayman2001, abstract = {This article provides an overview of the dynamics of answering and resisting or evading questions in broadcast news interviews. After a preliminary examination of the practices through which answers are recognizably constructed, the analysis turns to the practices through which interviewees manage responses that resist the agenda of an interviewer's question. When resisting overtly, interviewees engage in various forms of damage control. When resisting covertly, interviewees take steps to render the resistance less conspicuous. Both sets of practices facilitate resistant responses by reducing the negative consequences that might otherwise follow. Such practices demonstrate that, although interviewees have developed practices for resisting questions, the norm of answering remains a salient feature of the contemporary broadcast news interview.}, author = {Clayman, Steven E}, journal = {Language in Society}, number = {03}, pages = {403--442}, title = {{Answers and evasions}}, volume = {30}, year = {2001} } @article{Meltzoff1977, abstract = {Infants between 12 and 21 days of age can imitate both facial and manual gestures; this behavior cannot be explained in terms of either conditioning or innate releasing mechanisms. 
Such imitation implies that human neonates can equate their own unseen behaviors with gestures they see others perform.}, author = {Meltzoff, AN}, doi = {10.1126/science.198.4312.75}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Meltzoff - 1977 - Imitation of facial and manual gestures by human neonates.pdf:pdf}, issn = {0036-8075}, journal = {Science}, month = oct, number = {4312}, pages = {75--8}, pmid = {17741897}, title = {{Imitation of facial and manual gestures by human neonates}}, volume = {198}, year = {1977} } @article{Bickmore2010b, abstract = {We discuss issues in designing virtual humans for applications which require long-term voluntary use, and the problem of maintaining engagement with users over time. Concepts and theories related to engagement from a variety of disciplines are reviewed. We describe a platform for conducting studies into long-term interactions between humans and virtual agents, and present the results of two longitudinal randomized controlled experiments in which the effect of manipulations of agent behavior on user engagement was assessed.}, author = {Bickmore, Timothy Wallace and Schulman, Daniel and Yin, Langxuan}, doi = {10.1080/08839514.2010.492259}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore, Schulman, Yin - 2010 - Maintaining Engagement in Long-term Interventions with Relational Agents.pdf:pdf}, issn = {0883-9514}, journal = {Applied artificial intelligence (AAI)}, month = jul, number = {6}, pages = {648--666}, pmid = {21318052}, title = {{Maintaining Engagement in Long-term Interventions with Relational Agents.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=3035950\&tool=pmcentrez\&rendertype=abstract}, volume = {24}, year = {2010} } @article{ArthurJ.Clark2010, abstract = {Expanding on a framework introduced by Carl Rogers, an integral model of empathy in counseling uses empathic understanding 
through 3 ways of knowing: Subjective empathy enables a counselor to momentarily experience what it is like to be a client, interpersonal empathy relates to understanding a client's phenomenological experiencing, and objective empathy uses reputable knowledge sources outside of a client's frame of reference. Across the counseling process, empathy is integral to treatment strategies and interventions.}, author = {Clark, Arthur J.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Arthur J. Clark - 2010 - Empathy An integral model in the counseling process.pdf:pdf}, journal = {Journal of Counseling \& Development}, keywords = {Counseling,Counseling Techniques,Counselor Client Relationship,Empathy,Models}, number = {3}, pages = {348--356}, title = {{Empathy: An integral model in the counseling process}}, url = {http://aca.metapress.com/link.asp?id=075658qt56l20466}, volume = {88}, year = {2010} } @article{Held1992, author = {Held, R. and Durlach, N.}, journal = {Presence: Teleoperators and Virtual Environments}, number = {1}, pages = {109--112}, title = {{Telepresence}}, volume = {1}, year = {1992} } @inproceedings{Paiva2004, address = {Washington, DC, USA}, author = {Paiva, Ana and Dias, J. and Sobral, Daniel and Aylett, Ruth}, booktitle = {AAMAS '04 Proceedings of the Third International Joint Conference on Autonomous Agents and Multiagent Systems}, doi = {10.1109/AAMAS.2004.82}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Paiva et al. 
- 2004 - Caring for agents and agents that care Building empathic relations with synthetic agents.pdf:pdf}, isbn = {1581138644}, pages = {194--201}, publisher = {IEEE Computer Society}, title = {{Caring for agents and agents that care: Building empathic relations with synthetic agents}}, url = {http://dl.acm.org/citation.cfm?id=1018754 http://dx.doi.org/10.1109/AAMAS.2004.82}, year = {2004} } @article{Hess2001, author = {Hess, Ursula and Blairy, Sylvie}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hess, Blairy - 2001 - Facial mimicry and emotional contagion to dynamic emotional facial expressions and their influence on decoding acc.pdf:pdf}, journal = {International Journal of Psychophysiology}, keywords = {emotion recognition,emotional contagion,facial mimicry}, pages = {129--141}, title = {{Facial mimicry and emotional contagion to dynamic emotional facial expressions and their influence on decoding accuracy}}, volume = {40}, year = {2001} } @article{Pennebaker2003, abstract = {The words people use in their daily lives can reveal important aspects of their social and psychological worlds. With advances in computer technology, text analysis allows researchers to reliably and quickly assess features of what people say as well as subtleties in their linguistic styles. Following a brief review of several text analysis programs, we summarize some of the evidence that links natural word use to personality, social and situational fluctuations, and psychological interventions. Of particular interest are findings that point to the psychological value of studying particles-parts of speech that include pronouns, articles, prepositions, conjunctives, and auxiliary verbs. 
Particles, which serve as the glue that holds nouns and regular verbs together, can serve as markers of emotional state, social identity, and cognitive styles.}, author = {Pennebaker, James W and Mehl, Matthias R and Niederhoffer, Kate G}, doi = {10.1146/annurev.psych.54.101601.145041}, editor = {Guo, Li}, isbn = {0973403942}, issn = {00664308}, journal = {Annual Review of Psychology}, keywords = {artificial intelligence,emotions,health status,humans,individuality,interpersonal relations,mental health,natural language processing,personality,psycholinguistics,semantics,social environment,verbal behavior}, number = {1}, pages = {547--77}, pmid = {12185209}, publisher = {Annual Reviews}, title = {{Psychological aspects of natural language use: our words, our selves.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12185209}, volume = {54}, year = {2003} } @article{Rollnick2001, author = {Rollnick, S}, doi = {10.1080/09652140120089517}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rollnick - 2001 - Comments on Dunn et al.'s The use of brief interventions adapted from motivational interviewing across behavioral doma.pdf:pdf}, issn = {0965-2140}, journal = {Addiction (Abingdon, England)}, keywords = {Behavior Therapy,Behavior Therapy: methods,Humans,Motivation,Psychotherapy, Brief,Psychotherapy, Brief: methods,Treatment Outcome}, month = dec, number = {12}, pages = {1769--70; discussion 1774--5}, pmid = {11784469}, title = {{Comments on Dunn et al.'s "The use of brief interventions adapted from motivational interviewing across behavioral domains: a systematic review". Enthusiasm, quick fixes and premature controlled trials.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11784469}, volume = {96}, year = {2001} } @article{Wasfy2004, abstract = {An intelligent virtual environment is described for training users in the operation of complex engineering systems. 
The environment combines an intelligent agent facility, for tutoring, guiding and/or supervising the training; an object-oriented virtual environment engine, for displaying the engineering system; and a simulator, for simulating the system controls. The intelligent agent facility includes: (a) a hierarchical process knowledge base, (b) a rule-based expert system for natural language understanding, and (c) a human-like virtual characters engine. Three types of objects are used for representing the process knowledge, namely, processes, steps, and constraints. An application of the environment to the interactive training for operating a NASA wind tunnel is described. Two agents in the environment can perform several functions, including conducting an interactive virtual tour of the facility; guiding and supervising the training, as well as certifying the trainee.}, author = {Wasfy, Ayman and Wasfy, Tamer and Noor, Ahmed}, doi = {10.1016/j.advengsoft.2004.04.005}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wasfy, Wasfy, Noor - 2004 - Intelligent virtual environment for process training.pdf:pdf}, issn = {09659978}, journal = {Advances in Engineering Software}, keywords = {intelligent agents,natural language processing,virtual reality,virtual training environments}, month = jun, number = {6}, pages = {337--355}, title = {{Intelligent virtual environment for process training}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0965997804000390}, volume = {35}, year = {2004} } @book{Bull1987, abstract = {The purpose of this book is to present the results of a series of studies carried out by the author over a number of years, sharing a common focus on the role of posture and gesture in interpersonal communication. 
The first section of the book is intended to set these studies in the general context of non-verbal communication research; in addition, previous research on posture and gesture is reviewed in order to highlight the particular issues which were chosen as the focus of research to be reported here. Techniques of measurement are also discussed, and two scoring procedures are presented which were devised by the author for the purpose of categorizing posture and gesture. In the second and third parts of the volume are presented the results of eleven original studies of posture and gesture carried out by the author. The six experiments reported in Part II were concerned with the extent of which posture communicates information about listener emotions and attitudes, the seven studies reported in Part III were concerned with the relationship between posture, gesture and speech. The final section of the book (Part IV) is intended to summarize the main findings from the studies presented in this volume, to discuss their theoretical and practical significance, and to consider their implications for the way in which research on non-verbal communication is carried out.}, author = {Bull, Peter}, booktitle = {Encoding of Disagreement and Agreement}, pages = {1--6}, publisher = {Pergamon Books}, series = {International Series in Experimental Social Psychology}, title = {{Posture and Gesture}}, url = {http://eprints.whiterose.ac.uk/74117/}, volume = {16}, year = {1987} } @phdthesis{Klein1999, author = {Klein, Jonathan T}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Klein - 1999 - Computer Response to User Frustration.pdf:pdf}, school = {Massachusetts Institute of Technology}, title = {{Computer Response to User Frustration}}, type = {In partial fulfillment of the requirements for the degree of Master of Science in Media Arts and Sciences}, year = {1999} } @inproceedings{Hernandez2007, abstract = {In this paper we explore the 
possibilities that conversational agent technology offers for the improvement of the quality of human-machine interaction in a concrete area of application: the multimodal biometric authentication system. Our approach looks at the user perception effects related to the system interface rather than to the performance of the biometric technology itself. For this purpose we have created a multibiometric user test environment with two different interfaces or interaction metaphors: one with an embodied conversational agent and the other with on-screen text messages only. We present the results of an exploratory experiment that reveals interesting effects, related to the presence of a conversational agent, on the user’s perception of parameters such as privacy, ease of use, invasiveness or system security.}, address = {Prague, Czech Republic}, author = {Hern\'{a}ndez, \'{A}lvaro and L\'{o}pez, Beatriz and D\'{\i}az, David and Fern\'{a}ndez, Rub\'{e}n and Hern\'{a}ndez, Luis and Caminero, Javier}, booktitle = {Proceedings of the Workshop on Embodied Language Processing}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hern\'{a}ndez et al. - 2007 - A “ person ” in the interface effects on user perceptions of multibiometrics.pdf:pdf}, pages = {33--40}, publisher = {Association for Computational Linguistics}, title = {{A ``person'' in the interface: effects on user perceptions of multibiometrics}}, year = {2007} } @article{Miller1995b, author = {Miller, George A.}, journal = {Communications of the ACM}, number = {11}, pages = {39--41}, title = {{WordNet: A Lexical Database for English}}, volume = {38}, year = {1995} } @article{Wicker2003, abstract = {What neural mechanism underlies the capacity to understand the emotions of others? Does this mechanism involve brain areas normally involved in experiencing the same emotion? 
We performed an fMRI study in which participants inhaled odorants producing a strong feeling of disgust. The same participants observed video clips showing the emotional facial expression of disgust. Observing such faces and feeling disgust activated the same sites in the anterior insula and to a lesser extent in the anterior cingulate cortex. Thus, as observing hand actions activates the observer's motor representation of that action, observing an emotion activates the neural representation of that emotion. This finding provides a unifying mechanism for understanding the behaviors of others.}, author = {Wicker, Bruno and Keysers, Christian and Plailly, Jane and Royet, Jean Pierre and Gallese, Vittorio and Rizzolatti, Giacomo}, doi = {10.1016/S0896-6273(03)00679-2}, institution = {Institut de Neurosciences Physiologiques et Cognitives, CNRS, Chemin Joseph Aiguier, 13402 cedex 20, Marseille, France.}, issn = {08966273}, journal = {Neuron}, keywords = {adult,brain mapping,cerebral cortex,cerebral cortex anatomy \& histology,cerebral cortex physiology,chemical,computer assisted,emotions,emotions physiology,facial expression,humans,image processing,magnetic resonance imaging,magnetic resonance imaging methods,male,ocular,ocular physiology,odors,photic stimulation,random allocation,stimulation,vision}, number = {3}, pages = {655--64}, pmid = {14642287}, publisher = {Elsevier}, title = {{Both of us disgusted in My insula: the common neural basis of seeing and feeling disgust.}}, volume = {40}, year = {2003} } @article{Kendon1967, abstract = {Films of two-person conversations were transcribed and analyzed from the point of view of how gaze direction is related to utterance and silence. It was found that patterns of looking were systematically related to features of talk and could be accounted for in terms of the monitoring functions of gaze. 
At the same time, evidence was presented that suggested that gaze direction may also play a role in the regulation of turn-taking in conversation.}, author = {Kendon, Adam}, doi = {10.1016/0001-6918(67)90005-4}, issn = {00016918}, journal = {Acta Psychologica}, number = {1}, pages = {22--63}, publisher = {Elsevier BV, Radarweg 29, Amsterdam, 1043 NX, Netherlands}, title = {{Some functions of gaze-direction in social interaction.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6043092}, volume = {26}, year = {1967} } @article{Prochaska1997, abstract = {The transtheoretical model posits that health behavior change involves progress through six stages of change: precontemplation, contemplation, preparation, action, maintenance, and termination. Ten processes of change have been identified for producing progress along with decisional balance, self-efficacy, and temptations. Basic research has generated a rule of thumb for at-risk populations: 40\% in precontemplation, 40\% in contemplation, and 20\% in preparation. Across 12 health behaviors, consistent patterns have been found between the pros and cons of changing and the stages of change. Applied research has demonstrated dramatic improvements in recruitment, retention, and progress using stage-matched interventions and proactive recruitment procedures. The most promising outcomes to data have been found with computer-based individualized and interactive interventions. The most promising enhancement to the computer-based programs are personalized counselors. One of the most striking results to date for stage-matched programs is the similarity between participants reactively recruited who reached us for help and those proactively recruited who we reached out to help. 
If results with stage-matched interventions continue to be replicated, health promotion programs will be able to produce unprecedented impacts on entire at-risk populations.}, author = {Prochaska, J O and Velicer, W F}, chapter = {5}, doi = {10.4278/0890-1171-12.1.38}, editor = {Shumaker, S A and Schron, E B}, institution = {Cancer Prevention Research Center, University of Rhode Island, Kingston 02881-0808, USA. JOP@URIACC.URI.EDU}, issn = {08901171}, journal = {American Journal Of Health Promotion}, number = {1}, pages = {38--48}, publisher = {American Journal of Health Promotion P.O. Box 1897, 810 East 10th Street, Lawrence, KS 66044-8897}, title = {{The transtheoretical model of health behavior change}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/10170434}, volume = {12}, year = {1997} } @book{Deng2008, address = {London}, author = {Deng, Zhigang and Neumann, Ulrich}, editor = {Deng, Zhigang and Neumann, Ulrich}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Deng, Neumann - 2008 - Data-Driven 3D Facial Animation.pdf:pdf}, isbn = {9781846289064}, publisher = {Springer-Verlag}, title = {{Data-Driven 3D Facial Animation}}, year = {2008} } @article{Sarrafzadeh2006, author = {Sarrafzadeh, Abdolhossein and Alexander, Samuel and Dadgostar, Farhad and Fan, Chao and Bigdeli, Abbas}, doi = {10.1109/INNOVATIONS.2006.301981}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sarrafzadeh et al. 
- 2006 - See Me, Teach Me Facial Expression and Gesture Recognition for Intelligent Tutoring Systems.pdf:pdf}, isbn = {1-4244-0673-0}, journal = {2006 Innovations in Information Technology}, keywords = {affective,affective computing,affective tutoring systems,agents,emotion detection,human computer,interaction,lifelike,new type of its,proposed by the authors}, month = nov, pages = {1--5}, publisher = {IEEE}, title = {{See Me, Teach Me: Facial Expression and Gesture Recognition for Intelligent Tutoring Systems}}, year = {2006} } @article{Ekman1993, abstract = {Cross-cultural research on facial expression and the developments of methods to measure facial expression are briefly summarized. What has been learned about emotion from this work on the face is then elucidated. Four questions about facial expression and emotion are discussed: What information does an expression typically convey? Can there be emotion without facial expression? Can there be a facial expression of emotion without emotion? How do individuals differ in their facial expressions of emotion?}, author = {Ekman, Paul}, institution = {Human Interaction Laboratory, University of California, San Francisco 94143.}, journal = {American Psychologist}, number = {4}, pages = {384--392}, pmid = {8512154}, publisher = {American Psychological Association}, title = {{Facial expression and emotion.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8512154}, volume = {48}, year = {1993} } @article{Klein2002, abstract = {Use of technology often has unpleasant side effects, which may include strong, negative emotional states that arise during interaction with computers. Frustration, confusion, anger, anxiety and similar emotional states can affect not only the interaction itself, but also productivity, learning, social relationships, and overall well-being. 
This paper suggests a new solution to this problem: designing human–computer interaction systems to actively support users in their ability to manage and recover from negative emotional states. An interactive affect–support agent was designed and built to test the proposed solution in a situation where users were feeling frustration. The agent, which used only text and buttons in a graphical user interface for its interaction, demonstrated components of active listening, empathy, and sympathy in an effort to support users in their ability to recover from frustration. The agent's effectiveness was evaluated against two control conditions, which were also text-based interactions: (1) users’ emotions were ignored, and (2) users were able to report problems and ‘vent’ their feelings and concerns to the computer. Behavioral results showed that users chose to continue to interact with the system that had caused their frustration significantly longer after interacting with the affect–support agent, in comparison with the two controls. These results support the prediction that the computer can undo some of the negative feelings it causes by helping a user manage his or her emotional state.}, author = {Klein, J and Moon, Y and Picard, Rosalind W.}, doi = {10.1016/S0953-5438(01)00053-4}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Klein, Moon, Rosalind W. 
Picard - 2002 - This computer responds to user frustration Theory, design, and results.pdf:pdf}, journal = {Interacting with computers}, keywords = {affect,affective computing,empathetic interface,frustration,human-centred design,social interface,user emotion}, number = {2}, pages = {119--140}, publisher = {Elsevier Science Ltd}, title = {{This computer responds to user frustration: Theory, design, and results}}, url = {http://www.sciencedirect.com/science/article/pii/S0953543801000534}, volume = {14}, year = {2002} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @inproceedings{Hubal2003, abstract = {This paper describes lessons learned in developing the linguistic, cognitive, emotional, and gestural models underlying virtual human behavior in a training application designed to train civilian police officers how to recognize gestures and verbal cues indicating different forms of mental illness and how to verbally interact with the mentally ill. Schizophrenia, paranoia, and depression were all modeled for the application. For linguistics, the application has quite complex language grammars that captured a range of syntactic structures and semantic categories. For cognition, there is a great deal of augmentation to a plan-based transition network needed to model the virtual human’s knowledge. 
For emotions and gestures, virtual human behavior is based on expert-validated mapping tables specific to each mental illness. The paper presents five areas demanding continued research to improve virtual human behavior for use in training applications.}, address = {Miami, FL, US}, author = {Hubal, Robert C and Frank, Geoffrey A and Guinn, Curry I}, booktitle = {Proceedings of the 2003 International Conference on Intelligent User Interfaces (IUI'03)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hubal, Frank, Guinn - 2003 - Lessons Learned in Modeling Schizophrenic and Depressed Responsive Virtual Humans for Training.pdf:pdf}, isbn = {1581135866}, keywords = {agents,behavior modeling,interaction skills training,managing encounters with the,mentally ill,responsive virtual humans}, pages = {85--92}, publisher = {ACM}, title = {{Lessons Learned in Modeling Schizophrenic and Depressed Responsive Virtual Humans for Training}}, year = {2003} } @article{Johnstone2000, abstract = {This chapter provides a comprehensive overview of the current state of the literature on the vocal communication of emotion. It highlights some of the many evolutionary, physiological, cognitive, social, and cultural factors which shape the way humans express and perceive emotions in speech. With such a large and seemingly disparate number of determinants, it might seem as if the topic were too messy to expect any invariance in empirical findings. Perhaps surprisingly however, the summary of research into the production and perception of emotional speech has revealed considerable consistency. On the production side, the evidence is starting to accumulate that humans consistently modify their speech in specific ways to express different emotions. The major acoustic parameters are described and the relevant literature reviewed. 
Results of perception studies indicate that emotions expressed in speech are to a large extent successfully detected by a variety of populations, on the basis of an experimentally identifiable set of acoustic parameters. The differences in recognition accuracy between different emotions are discussed. The consistency in the results is no doubt partly because most research to date has been limited to settings in which many of the factors described above have been eliminated or controlled for. In addition to further refinement of analysis techniques and a focus on real, as well as acted, emotional speech, there is clearly a need for studies that better quantify the relative contribution of culture, language and social strategy to the vocal comunication of emotion. To address these issues in a manner that allows results from different studies to be integrated and compared, a coordinated, interdisciplinary approach to research on the vocal communication of emotion will be required.}, author = {Johnstone, Tom and Scherer, Klaus R.}, chapter = {14}, editor = {Lewis, M and Haviland-Jones, J M}, journal = {Handbook of emotions}, number = {1-2}, pages = {220--235}, publisher = {The Guilford Press}, title = {{Vocal communication of emotion}}, url = {http://centaur.reading.ac.uk/4362/}, volume = {2}, year = {2000} } @misc{Ma2005, abstract = {This short paper contains a preliminary description of a novel type of chat system that aims at realizing natural and social communication between distant communication partners. The system is based on an emotion estimation module that assesses the affective content of textual messages. 
Avatars associated with chat partners act out the assessed emotions of messages through multiple modalities, including synthetic speech and affect-related gestures.}, author = {Ma, C and Osherenko, A and Prendinger, Helmut and Ishizuka, M}, booktitle = {Proceedings of the 2005 International Conference on Active Media Technology 2005 AMT 2005}, doi = {10.1109/AMT.2005.1505418}, isbn = {0780390350}, number = {i}, pages = {546--548}, publisher = {IEEE}, title = {{A chat system based on emotion estimation from text and embodied conversational messengers}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1505418}, year = {2005} } @article{Bailenson2001, abstract = {During the last half of the twentieth century, psychologists and anthropologists have studied proxemics, or spacing behavior, among people in many contexts. As we enter the twenty-first century, immersive virtual environment technology promises new experimental venues in which researchers can study proxemics. Immersive virtual environments provide realistic and compelling experimental settings without sacrificing experimental control. The experiment reported here tested Argyle and Dean’s (1965) equilibrium theory’s specification of an inverse relationship between mutual gaze, a nonverbal cue signaling intimacy, and interpersonal distance. Participants were immersed in a three-dimensional virtual room in which a virtual human representation (that is, an embodied agent) stood. Under the guise of a memory task, participants walked towards and around the agent. Distance between the participant and agent was tracked automatically via our immersive virtual environment system. All participants maintained more space around agents than they did around similarly sized and shaped but nonhuman-like objects. 
Female participants maintained more interpersonal distance between themselves and agents who engaged them in eye contact (that is, mutual gaze behavior) than between themselves and agents who did not engage them in eye contact, whereas male participants did not. Implications are discussed for the study of proxemics via immersive virtual environment technology, as well as the design of virtual environments and virtual humans}, author = {Bailenson, Jeremy N and Blascovich, Jim and Beall, Andrew C and Loomis, Jack M}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bailenson et al. - 2001 - Equilibrium Theory Revisited Mutual Gaze and Personal Space.pdf:pdf}, journal = {Presence: Teleoperators and Virtual Environments}, number = {6}, pages = {583--598}, title = {{Equilibrium Theory Revisited: Mutual Gaze and Personal Space}}, volume = {10}, year = {2001} } @inproceedings{Schrammel2007, abstract = {This paper describes the results of three studies investigating an embodied agent that supports its interaction with the user by gazing at corresponding objects within its close environment. Three experiments were conducted in order to research whether users can detect an agent’s line of sight, whether the agent’s gaze direction can help to guide the users' attention towards designated locations and whether such a setup can be used to improve realistic interaction situations. 
The results show that a) users can detect the agent’s gaze direction quickly (within 200 ms) but not very exactly, b) the use of the agent’s gaze direction can speed up but also slow down the detection of objects in dependence on their location and c) that the agent’s gaze towards corresponding objects during the interaction can have counterproductive effects in realistic settings.}, address = {San Jose, California, USA.}, author = {Schrammel, Johann and Sefelin, Reinhard and Tscheligi, Manfred}, booktitle = {CHI 2007 Proceedings of People, Looking at People April}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Schrammel, Sefelin, Tscheligi - 2007 - “ Look !” – Using the Gaze Direction of Embodied Agents.pdf:pdf}, isbn = {9781595935939}, keywords = {Computer Vision,Embodied Agent,Gaze Direction}, pages = {1187--1190}, publisher = {ACM}, title = {{``Look!'' -- Using the Gaze Direction of Embodied Agents}}, year = {2007} } @article{Witmer1998, author = {Witmer, Bob G and Singer, Michael J.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Witmer, Singer - 1998 - Measuring Presence in Virtual Environments A Presence.pdf:pdf}, journal = {Presence: Teleoperators and Virtual Environments}, number = {3}, pages = {225--240}, title = {{Measuring Presence in Virtual Environments: A Presence Questionnaire}}, volume = {7}, year = {1998} } @article{Ong1995, abstract = {Communication can be seen as the main ingredient in medical care. In reviewing doctor-patient communication, the following topics are addressed: (1) different purposes of medical communication; (2) analysis of doctor-patient communication; (3) specific communicative behaviors; (4) the influence of communicative behaviors on patient outcomes; and (5) concluding remarks. 
Three different purposes of communication are identified, namely: (a) creating a good inter-personal relationship; (b) exchanging information; and (c) making treatment-related decisions. Communication during medical encounters can be analyzed by using different interaction analysis systems (IAS). These systems differ with regard to their clinical relevance, observational strategy, reliability/validity and channels of communicative behavior. Several communicative behaviors that occur in consultations are discussed: instrumental (cure oriented) vs affective (care oriented) behavior, verbal vs non-verbal behavior, privacy behavior, high vs low controlling behavior, and medical vs everyday language vocabularies. Consequences of specific physician behaviors on certain patient outcomes, namely: satisfaction, compliance/adherence to treatment, recall and understanding of information, and health status/psychiatric morbidity are described. Finally, a framework relating background, process and outcome variables is presented.}, author = {Ong, L M and {De Haes}, J C and Hoos, A M and Lammes, F B}, doi = {10.1016/0277-9536(94)00155-M}, institution = {Department of Medical Psychology, Academic Medical Hospital, Amsterdam, The Netherlands.}, issn = {02779536}, journal = {Social Science \& Medicine (1982)}, keywords = {communication,humans,patient education topic,physician patient relations,treatment outcome}, number = {7}, pages = {903--18}, pmid = {7792630}, publisher = {Elsevier}, title = {{Doctor-patient communication: a review of the literature.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18666041}, volume = {40}, year = {1995} } @article{Emmons2001, abstract = {Motivational interviewing (MI) has been well studied in specialist settings. There has been considerable interest in applying MI to community health care settings. Such settings represent a significant departure from the more traditional, specialist settings in which MI has been developed and tested. 
The purpose of this paper is to provide a brief overview of MI and to identify and discuss the key issues that are likely to arise when adapting this approach to health care and public health settings. This paper provides an overview of important issues to consider in adapting an effective counseling strategy to new settings, and is intended to begin a dialogue about the use of MI in community health care settings.}, author = {Emmons, K M and Rollnick, S}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Emmons, Rollnick - 2001 - Motivational interviewing in health care settings. Opportunities and limitations.pdf:pdf}, issn = {0749-3797}, journal = {American journal of preventive medicine}, keywords = {Adult,Attitude of Health Personnel,Community Health Services,Community Health Services: standards,Community Health Services: trends,Female,Health Care Surveys,Humans,Interviews as Topic,Interviews as Topic: methods,Male,Motivation,Outcome Assessment (Health Care),Preventive Medicine,Preventive Medicine: standards,Preventive Medicine: trends,United States}, month = jan, number = {1}, pages = {68--74}, pmid = {11137778}, title = {{Motivational interviewing in health care settings. Opportunities and limitations.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11137778}, volume = {20}, year = {2001} } @article{DeVignemont2006, abstract = {Recent imaging results suggest that individuals automatically share the emotions of others when exposed to their emotions. We question the assumption of the automaticity and propose a contextual approach, suggesting several modulatory factors that might influence empathic brain responses. Contextual appraisal could occur early in emotional cue evaluation, which then might or might not lead to an empathic brain response, or not until after an empathic brain response is automatically elicited. 
We propose two major roles for empathy; its epistemological role is to provide information about the future actions of other people, and important environmental properties. Its social role is to serve as the origin of the motivation for cooperative and prosocial behavior, as well as help for effective social communication.}, author = {de Vignemont, Frederique and Singer, Tania}, doi = {10.1016/j.tics.2006.08.008}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/de Vignemont, Singer - 2006 - The empathic brain how, when and why.pdf:pdf}, issn = {1364-6613}, journal = {Trends in cognitive sciences}, keywords = {Automatism,Automatism: psychology,Brain,Brain Mapping,Brain: physiology,Cerebral,Cerebral Cortex,Cerebral Cortex: physiology,Cerebral: physiology,Communication,Cooperative Behavior,Cues,Dominance,Emotions,Emotions: physiology,Empathy,Female,Gyrus Cinguli,Gyrus Cinguli: physiology,Humans,Interpersonal Relations,Magnetic Resonance Imaging,Male,Motivation,Nerve Net,Nerve Net: physiology,Neurons,Neurons: physiology,Social Behavior,Social Environment}, month = oct, number = {10}, pages = {435--41}, pmid = {16949331}, title = {{The empathic brain: how, when and why?}}, volume = {10}, year = {2006} } @inproceedings{Vertegaal2003, abstract = {GAZE-2 is a novel group video conferencing system that uses eye-controlled camera direction to ensure parallax- free transmission of eye contact. To convey eye contact, GAZE-2 employs a video tunnel that allows placement of cameras behind participant images on the screen. To avoid parallax, GAZE-2 automatically directs the cameras in this video tunnel using an eye tracker, selecting a single camera closest to where the user is looking for broadcast. Images of users are displayed in a virtual meeting room, and rotated towards the participant each user looks at. This way, eye contact can be conveyed to any number of users with only a single video stream per user. 
We empirically evaluated whether eye contact perception is affected by automated camera direction, which causes angular shifts in the transmitted images. Findings suggest camera shifts do not affect eye contact perception, and are not considered highly distractive.}, address = {Fort Lauderdale, Florida, USA}, author = {Vertegaal, Roel and Weevers, Ivo and Sohn, Changuk and Cheung, Chris}, booktitle = {SIGCHI Conference on Human Factors in Computing Systems (CHI'03)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vertegaal et al. - 2003 - GAZE-2 conveying eye contact in group video conferencing using eye-controlled camera direction.pdf:pdf}, isbn = {1581136307}, keywords = {attentive user interfaces,conferencing,eye contact,eye tracking,gaze,multiparty video}, number = {5}, pages = {521--528}, publisher = {ACM Press}, title = {{GAZE-2: conveying eye contact in group video conferencing using eye-controlled camera direction}}, url = {http://dl.acm.org/citation.cfm?id=642702}, year = {2003} } @article{Roter1998, abstract = {Objectives. This article summarizes the results of 153 studies published between 1977 and 1994 that evaluated the effectiveness of interventions to improve patient compliance with medical regimens. Methods. The compliance interventions were classified by theoretical focus into educational, behavioral, and affective categories within which specific intervention strategies were further distinguished. The compliance indicators broadly represent five classes of compliance-related assessments: (1) health outcomes (eg, blood pressure and hospitalization), (2) direct indicators (eg, urine and blood tracers and weight change), (3) indirect indicators (eg, pill count and refill records), (4) subjective report (eg, patients' or others' reports), and (5) utilization (appointment making and keeping and use of preventive services). 
An effect size (ES) r, defined as Fisher's Z transformation of the Pearson correlation coefficient, representing the association between each intervention (intervention versus control) and compliance measure was calculated. Both an unweighted and weighted r were calculated because of large sample size variation, and a combined probability across studies was calculated. Results. The interventions produced significant effects for all the compliance indicators (combined Z values more than 5 and less than 32), with the magnitude of effects ranging from small to large. The largest effects (unweighted) were evident for refill records and pill counts and in blood/urine and weight change studies. Although smaller in magnitude, compliance effects were evident for improved health outcomes and utilization. Chronic disease patients, including those with diabetes and hypertension, as well as cancer patients and those with mental health problems especially benefited from interventions. Conclusions. No single strategy or programmatic focus showed any clear advantage compared with another. Comprehensive interventions combining cognitive, behavioral, and affective components were more effective than single-focus interventions.}, author = {Roter, Debra L and Hall, Judith A and Merisca, Rolande and Nordstrom, Beth and Cretin, Deborah and Svarstad, Bonnie}, issn = {00257079}, journal = {Medical Care}, number = {8}, pages = {1138--1161}, publisher = {Lippincott Williams \& Wilkins}, title = {{Effectiveness of interventions to improve patient compliance: a meta-analysis}}, url = {http://journals.lww.com/lww-medicalcare/Abstract/1998/08000/Effectiveness\_of\_Interventions\_to\_Improve\_Patient.4.aspx}, volume = {36}, year = {1998} } @article{Hall1988, abstract = {This article summarizes the results of 41 independent studies containing correlates of objectively measured provider behaviors in medical encounters. 
Provider behaviors were grouped a priori into the process categories of information giving, questions, competence, partnership building, and socioemotional behavior. Total amount of communication was also included. All correlations between variables within these categories and external variables (patient outcome variables or patient and provider background variables) were extracted. The most frequently occurring outcome variables were satisfaction, recall, and compliance, and the most frequently occurring background variables were the patient's gender, age, and social class. Average correlations and combined significance levels were calculated for each combination of process category and external variable. Results showed significant relations of small to moderate average magnitude between these external variables and almost all of the provider behavior categories. A theory of provider-patient reciprocation is proposed to account for the pattern of results.}, author = {Hall, Judith A and Roter, Debra L and Katz, N R}, institution = {Department of Psychology, Northeastern University, Boston, MA 02115.}, journal = {Medical Care}, keywords = {adult,ambulatory care,bibliography topic,communication,consumer satisfaction,female,humans,male,mental recall,patient compliance,physician patient relations,professional patient relations,social class}, number = {7}, pages = {657--675}, pmid = {3292851}, publisher = {Lippincott Williams \& Wilkins}, title = {{Meta-analysis of correlates of provider behavior in medical encounters.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/3292851}, volume = {26}, year = {1988} } @article{Bickmore2005, abstract = {This research investigates the meaning of “human-computer relationship” and presents techniques for constructing, maintaining, and evaluating such relationships, based on research in social psychology, sociolinguistics, communication and other social sciences. 
Contexts in which relationships are particularly important are described, together with specific benefits (like trust) and task outcomes (like improved learning) known to be associated with relationship quality. We especially consider the problem of designing for long-term interaction, and define relational agents as computational artifacts designed to establish and maintain long-term social-emotional relationships with their users. We construct the first such agent, and evaluate it in a controlled experiment with 101 users who were asked to interact daily with an exercise adoption system for a month. Compared to an equivalent task-oriented agent without any deliberate social-emotional or relationship-building skills, the relational agent was respected more, liked more, and trusted more, even after four weeks of interaction. Additionally, users expressed a significantly greater desire to continue working with the relational agent after the termination of the study. We conclude by discussing future directions for this research together with ethical and other ramifications of this work for HCI designers.}, author = {Bickmore, Timothy Wallace and Picard, Rosalind W.}, doi = {10.1145/1067860.1067867}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore, Picard - 2005 - Establishing and maintaining long-term human-computer relationships.pdf:pdf}, journal = {ACM Transactions on Computer-Human Interaction (TOCHI)}, keywords = {Human-computer interaction,embodied conversational agent,relational agent,social interface}, number = {2}, pages = {617--638}, title = {{Establishing and maintaining long-term human-computer relationships}}, url = {http://dl.acm.org/citation.cfm?id=1067860.1067867}, volume = {12}, year = {2005} } @techreport{Gratch2010, author = {Gratch, Jonathan and Kang, Sin-hwa and Wang, Ning}, booktitle = {Imagine}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley 
Desktop/Downloaded/Gratch, Kang, Wang - 2010 - Using social agents explore theories of rapport and emotional resonance.pdf:pdf}, institution = {University of Southern California}, pages = {1--22}, title = {{Using social agents to explore theories of rapport and emotional resonance}}, year = {2010} } @book{Leigh2006, author = {Leigh, R. John and Zee, David S.}, edition = {Fourth}, isbn = {0195300904, 9780195300901}, publisher = {Oxford University Press}, title = {{The neurology of eye movements}}, year = {2006} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Tavangarian2004, author = {Tavangarian, Djamshid and Leypold, Markus E. and N\"{o}lting, Kristin and R\"{o}ser, Marc and Voigt, Denny}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Tavangarian et al. 
- 2004 - Is e-learning the Solution for Individual Learning.pdf:pdf}, journal = {Electronic Journal of E-learning}, keywords = {constructivist learning model,document generation,e-learning system,learning objects}, number = {2}, pages = {273--280}, title = {{Is e-learning the Solution for Individual Learning?}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.116.8017\&rep=rep1\&type=pdf\&a=bi\&pagenumber=1\&w=100}, volume = {2}, year = {2004} } @book{Miller2012, address = {Mill Valley, CA}, author = {Miller, Ali}, publisher = {Psychotherapy.net}, title = {{Instructor's manual for Increasing Importance in Motivational Interviewing}}, year = {2012} } @article{Beck1981, author = {Beck, K. H. and Lund, A. K.}, journal = {Journal of Applied Social Psychology}, number = {5}, pages = {401--415}, title = {{The Effects of Health Threat Seriousness and Personal Efficacy upon Intentions and Behavior}}, volume = {11}, year = {1981} } @article{Kessous2009, author = {Kessous, Loic and Castellano, Ginevra and Caridakis, George}, doi = {10.1007/s12193-009-0025-5}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kessous, Castellano, Caridakis - 2009 - Multimodal emotion recognition in speech-based interaction using facial expression, body gesture.pdf:pdf}, issn = {1783-7677}, journal = {Journal on Multimodal User Interfaces}, keywords = {affective body language,affective speech,emotion recognition,facial expression,multimodal}, month = dec, number = {1-2}, pages = {33--48}, title = {{Multimodal emotion recognition in speech-based interaction using facial expression, body gesture and acoustic analysis}}, url = {http://www.springerlink.com/index/10.1007/s12193-009-0025-5}, volume = {3}, year = {2009} } @article{Lin2009, author = {Lin, Jzau-Sheng and Huang, Shi-Yuang and Pan, Kuo-Wen and Liu, Shao-Han}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lin et al. 
- 2009 - A physiological signal monitoring system based on an SoC platform and wireless network technologies in homecare tech.pdf:pdf}, journal = {Journal of Medical and Biological Engineering}, number = {1}, pages = {47--51}, title = {{A physiological signal monitoring system based on an SoC platform and wireless network technologies in homecare technology}}, url = {http://jmbe.bme.ncku.edu.tw/index.php/bme/article/viewArticle/337}, volume = {29}, year = {2009} } @article{Fuchs1987, abstract = {The impact of examiner/examinee familiarity and rapport on psychological test performance is reviewed. Drawing upon research involving hundreds of young handicapped and nonhandicapped children, it was found that certain handicapped children obtain higher scores when tested by familiar examiners. Implications for practice, theory, and personnel preparation are discussed.}, author = {Fuchs, Douglas}, journal = {Topics in Early Childhood Special Education}, number = {3}, pages = {90--104}, title = {{Examiner Familiarity Effects on Test Performance: Implications for Training and Practice}}, volume = {7}, year = {1987} } @inproceedings{Yasavur2013a, abstract = {This paper describes the design of a multimodal spoken dialogue system using Markov Decision Processes (MDPs) to enable embodied conversational virtual health coach agents to deliver brief interventions for lifestyle behavior change - in particular excessive alcohol consumption. Its contribution is two fold. First, it is the first attempt to-date to study stochastic dialogue policy optimization techniques in the health dialogue domain. Second, it provides a model for longer branching dialogues (in terms of number of dialogue turns and number of slots) than the usual slot filling dialogue interactions currently available (e.g. tourist information domain). 
In addition, the model forms the basis for the generation of a richly annotated dialogue corpus, which is essential for applying optimization methods based on reinforcement learning.}, address = {Edinburgh, UK}, author = {Yasavur, Ugan and Lisetti, Christine and Rishe, Naphtali}, booktitle = {13th International Conference on Intelligent Virtual Agents (IVA'13)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yasavur, Lisetti, Rishe - 2013 - Modeling Brief Alcohol Intervention Dialogue with MDPs for Delivery by ECAs.pdf:pdf}, keywords = {alcoholism,at-risk,behavior change,brief intervention,eca,embodied conversational agent,intelligent virtual agents,markov decision processes,reinforcement learning,spoken dialogue system}, publisher = {Springer's Lecture Notes in Computer Science/Lecture Notes in Artificial Intelligence (LNCS/LNAI) series}, title = {{Modeling Brief Alcohol Intervention Dialogue with MDPs for Delivery by ECAs}}, year = {2013} } @article{James1884, abstract = {The physiologists who, during the past few years, have been so industriously exploring the functions of the brain, have limited their attempts at explanation to its cognitive and volitional performances. Dividing the brain into sensorial and motor centers, they have found their division to be exactly paralleled by the analysis made by empirical psychology, of the perceptive and volitional parts of the mind into their simplest elements. But the aesthetic sphere of the mind, its longings, its pleasures and pains, and its emotions, have been so ignored in all these researches that one is tempted to suppose that if either Dr. Ferrier or Dr. 
Munk were asked for a theory in brain-terms of the latter mental facts, they might both reply, either that they had as yet bestowed no thought upon the subject, or that they had found it so difficult to make distinct hypotheses, that the matter lay for them among the problems of the future, only to be taken up after the simpler ones of the present should have been definitely solved. And yet it is even now certain that of two things concerning the emotions, one must be true. Either separate and special centers affected to them alone, are their brain-seat, or else they correspond to processes occurring in the motor and sensory centers, already assigned, or in others like them, not yet mapped out. If the former be the case we must deny the current view, and hold the cortex to be something more than the surface of projection for every sensitive spot and every muscle in the body. If the latter be the case, we must ask whether the emotional process in the sensory or motor center be an altogether peculiar one, or whether it resembles the ordinary perceptive processes of which those centers are already recognized to be the seat. The purpose of the following pages is to show that the last alternative comes nearest to the truth, and that the emotional brain-processes not only}, author = {James, William}, chapter = {188}, issn = {00264423}, journal = {Mind}, number = {34}, pages = {188--205}, publisher = {JSTOR}, title = {{What is an Emotion?}}, url = {http://www.jstor.org/stable/2246769}, volume = {9}, year = {1884} } DUPLICATE ENTRY (disabled; identical entry with the same key appears earlier in this file): misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{ElAyadi2011, author = {{El Ayadi}, Moataz and Kamel, Mohamed S. and Karray, Fakhri}, doi = {10.1016/j.patcog.2010.09.020}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/El Ayadi, Kamel, Karray - 2011 - Survey on speech emotion recognition Features, classification schemes, and databases.pdf:pdf}, issn = {00313203}, journal = {Pattern Recognition}, month = mar, number = {3}, pages = {572--587}, title = {{Survey on speech emotion recognition: Features, classification schemes, and databases}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0031320310004619}, volume = {44}, year = {2011} } @book{Hojat2007, author = {Hojat, Mohammadreza}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hojat - 2007 - Empathy in patient care antecedents, development, measurement, and outcomes.pdf:pdf}, isbn = {9780387336077}, address = {New York, NY}, publisher = {Springer}, title = {{Empathy in patient care: antecedents, development, measurement, and outcomes}}, year = {2007} } @book{Ericsson1993, address = {London}, author = {Ericsson, K.A. and Simon, H.A.}, edition = {Rev. 
Ed.}, publisher = {MIT Press}, title = {{Protocol analysis: Verbal reports as data}}, year = {1993} } @book{Carkhuff1969, address = {New York}, author = {Carkhuff, R.}, publisher = {Holt, Rinehart \& Winston}, title = {{Helping and human relations: Selection and training (Vol. 1)}}, year = {1969} } @inproceedings{Boukricha2011b, abstract = {Empathy is believed to play a major role as a basis for humans’ cooperative behavior. Recent research shows that humans empathize with each other to different degrees depending on several modulation factors including, among others, their social relationships, their mood, and the situational context. In human spatial interaction, partners share and sustain a space that is equally and exclusively reachable to them, the so-called interaction space. In a cooperative interaction scenario of relocating objects in interaction space, we introduce an approach for triggering and modulating a virtual humans cooperative spatial behavior by its degree of empathy with its interaction partner. That is, spatial distances like object distances as well as distances of arm and body movements while relocating objects in interaction space are modulated by the virtual human’s degree of empathy. 
In this scenario, the virtual human's empathic emotion is generated as a hypothesis about the partner's emotional state as related to the physical effort needed to perform a goal directed spatial behavior.}, address = {Berlin, Heidelberg}, author = {Boukricha, Hana and Nguyen, H.}, booktitle = {Proceedings of the 10th international conference on Intelligent virtual agents IVA'11}, doi = {10.1007/978-3-642-23974-8\_38}, editor = {Kopp, Stefan and Marsella, Stacy and Thorisson, Kristinn and Vilhjalmsson, Hannes}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Boukricha, Nguyen - 2011 - Sharing Emotions and Space – Empathy as a Basis for Cooperative Spatial Interaction.pdf:pdf}, pages = {350--362}, publisher = {Springer-Verlag}, title = {{Sharing Emotions and Space -- Empathy as a Basis for Cooperative Spatial Interaction}}, url = {http://www.springerlink.com/content/q22784632u008337/}, year = {2011} } @inproceedings{Hernandez-Trapote2008, abstract = {In this article we present a research scheme which aims to analyze the use of Embodied Conversational Agent (ECA) technology to improve the robustness and acceptability of speaker enrolment and verification dialogues designed to provide secure access through natural and intuitive speaker recognition. In order to find out the possible effects of the visual information channel provided by the ECA, tests were carried out in which users were divided into two groups, each interacting with a different interface (metaphor): an ECA Metaphor group -with an ECA-, and a VOICE Metaphor group -without an ECA-. Our evaluation methodology is based on the ITU-T P.851 recommendation for spoken dialogue system evaluation, which we have complemented to cover particular aspects with regard to the two major extra elements we have incorporated: secure access and an ECA. 
Our results suggest that likeability-type factors and system capabilities are perceived more positively by the ECA metaphor users than by the VOICE metaphor users. However, the ECA’s presence seems to intensify users’ privacy concerns.}, address = {New York, New York, USA}, author = {Hern\'{a}ndez-Trapote, \'{A}lvaro and L\'{o}pez-Menc\'{\i}a, Beatriz and D\'{\i}az, David and Fern\'{a}ndez-Pozo, Rub\'{e}n and Caminero, Javier}, booktitle = {Proceedings of the 10th international conference on Multimodal interfaces - IMCI '08}, doi = {10.1145/1452392.1452454}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hern\'{a}ndez-Trapote et al. - 2008 - Embodied conversational agents for voice-biometric interfaces.pdf:pdf}, isbn = {9781605581989}, keywords = {Conversational Agent,Embodied biometrics interfaces,Experimentation,Human Factors,Multimodal evaluation,Security,Standardization,Verification.,voice authentication.}, pages = {305}, publisher = {ACM Press}, title = {{Embodied conversational agents for voice-biometric interfaces}}, url = {http://portal.acm.org/citation.cfm?doid=1452392.1452454}, year = {2008} } @article{O'Brien2008, abstract = {Increased emphasis on user experiences with technology demonstrates that systems must be not only usable, but engaging. Engagement, defined as a quality of user experience, is a multidimensional construct characterized by aesthetic appeal, novelty, perceived challenge, feedback and control, attention, motivation, and affect. To measure engagement, we developed a multidimensional instrument and surveyed 440 online shoppers to assess its reliability and construct validity. Results of exploratory factor analysis showed that engagement is comprised of six distinct factors: perceived usability, aesthetics, focused attention, involvement, novelty, and endurability.}, author = {O'Brien, Heather L. and Toms, Elaine G. and Kelloway, E. 
Kevin and Kelley, Elizabeth}, doi = {10.1002/meet.2008.1450450258}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/O'Brien et al. - 2008 - Developing and evaluating a reliable measure of user engagement.pdf:pdf}, journal = {Proceedings of the American Society for Information Science and Technology}, number = {1}, pages = {1--10}, title = {{Developing and evaluating a reliable measure of user engagement}}, url = {http://onlinelibrary.wiley.com/doi/10.1002/meet.2008.1450450258/full}, volume = {45}, year = {2008} } @article{Taigman2011, archivePrefix = {arXiv}, arxivId = {arXiv:1108.1122v1}, author = {Taigman, Yaniv and Wolf, Lior}, eprint = {arXiv:1108.1122v1}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Taigman, Wolf - 2011 - Leveraging Billions of Faces to Overcome Performance Barriers in Unconstrained Face Recognition.pdf:pdf}, journal = {Arxiv preprint arXiv:1108.1122}, number = {view 2}, pages = {1--7}, title = {{Leveraging Billions of Faces to Overcome Performance Barriers in Unconstrained Face Recognition}}, volume = {1}, year = {2011} } @inproceedings{Jaques2004, abstract = {In this paper we describe the use of mental states, more specifically the BDI approach, to implement the process of affective diagnosis in an educational environment. We use the OCC model, which is based on the cognitive theory of emotions and is possible to be implemented computationally, in order to infer the learner’s emotions from his actions in the system interface. The BDI approach is very adequate since the emotions have a dynamic nature. 
Besides, in our work we profit from the reasoning capacity of the BDI approach in order to infer the student's appraisal, which allow us to deduce student's emotions.}, address = {Puebla}, author = {Jaques, Patricia Augustin and Viccari, Rosa M}, booktitle = {Ibero-American Conference on Artificial Intelligence (IBERAMIA)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jaques, Viccari - 2004 - A BDI Approach to Infer Student's Emotions.pdf:pdf}, pages = {901--911}, publisher = {Springer-Verlag}, title = {{A BDI Approach to Infer Student's Emotions}}, year = {2004} } @article{Roberts1996, author = {Roberts, William and Strayer, Janet}, doi = {10.2307/1131826}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Roberts, Strayer - 1996 - Empathy, Emotional Expressiveness, and Prosocial Behavior.pdf:pdf}, issn = {00093920}, journal = {Child Development}, month = apr, number = {2}, pages = {449--470}, title = {{Empathy, Emotional Expressiveness, and Prosocial Behavior}}, url = {http://www.jstor.org/stable/1131826?origin=crossref}, volume = {67}, year = {1996} } @article{Baldauf2009, abstract = {In experimental studies using flight simulations subjects' duration estimates have shown to be an effective indicator of cognitive task demands. In this study we wanted to find out whether subjective time perception could serve as a measure of cognitive workload during simulated car driving. Participants drove on a round course of a driving simulator consisting of three different environments with different levels of task demands. Drivers were required to perform a time-production task while driving the vehicle. Electrodermal activity and subjective ratings of mental workload (SWAT) were recorded simultaneously. The length of produced intervals increased significantly in more complex driving situations, as did electrodermal activity and subjective ratings of mental workload. 
Thus, time production is a valid indicator of cognitive involvement in simulated driving and could become a valid method to measure the current mental workload of car drivers in various traffic situations.}, author = {Baldauf, Daniel and Burgard, Esther and Wittmann, Marc}, doi = {10.1016/j.apergo.2009.01.004}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Baldauf, Burgard, Wittmann - 2009 - Time perception as a workload measure in simulated car driving.pdf:pdf}, issn = {1872-9126}, journal = {Applied ergonomics}, keywords = {Adult,Analysis of Variance,Automobile Driving,Automobiles,Cognition,Female,Humans,Male,Middle Aged,Time Perception,Workload}, month = sep, number = {5}, pages = {929--35}, pmid = {19200943}, publisher = {Elsevier Ltd}, title = {{Time perception as a workload measure in simulated car driving.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19200943}, volume = {40}, year = {2009} } @incollection{Grammer2011, address = {Cambridge, UK}, author = {Grammer, K. and Tessarek, A. and Hofer, G.}, booktitle = {Face-to-face communication over the Internet}, editor = {Kappas, A. and Kramer, N.}, pages = {237--279}, publisher = {Cambridge University Press}, title = {{From emoticons to avatars: The simulation of facial expression}}, year = {2011} } @article{Drapeau2009, abstract = {Persons with dementia of the Alzheimer type (DAT) are impaired in recognizing emotions from face and voice. Yet clinical practitioners use these mediums to communicate with DAT patients. Music is also used in clinical practice, but little is known about emotional processing from music in DAT. This study aims to assess emotional recognition in mild DAT. Seven patients with DAT and 16 healthy elderly adults were given three tasks of emotional recognition for face, prosody, and music. DAT participants were only impaired in the emotional recognition from the face. 
These preliminary results suggest that dynamic auditory emotions are preserved in DAT.}, author = {Drapeau, Joanie and Gosselin, Nathalie and Gagnon, Lise and Peretz, Isabelle and Lorrain, Dominique}, doi = {10.1111/j.1749-6632.2009.04768.x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Drapeau et al. - 2009 - Emotional recognition from face, voice, and music in dementia of the Alzheimer type.pdf:pdf}, issn = {1749-6632}, journal = {Annals of the New York Academy of Sciences}, keywords = {Aged,Alzheimer Disease,Alzheimer Disease: physiopathology,Alzheimer Disease: psychology,Emotions,Face,Female,Humans,Male,Music,Recognition (Psychology),Recognition (Psychology): physiology,Voice}, month = jul, pages = {342--5}, pmid = {19673804}, title = {{Emotional recognition from face, voice, and music in dementia of the Alzheimer type.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19673804}, volume = {1169}, year = {2009} } @article{Hingson2005, author = {Hingson, Ralph and Heeren, Timothy and Winter, Michael and Wechsler, Henry}, journal = {Journal of Studies on Alcohol and Drugs}, pages = {12--20}, title = {{MAGNITUDE OF ALCOHOL-RELATED MORTALITY AND MORBIDITY AMONG U.S. 
COLLEGE STUDENTS AGES 18--24: Changes from 1999 to 2005}}, url = {http://www.jsad.com/}, volume = {16}, year = {2009} } @book{Picard1997, address = {Cambridge, Massachusetts}, author = {Picard, Rosalind W.}, isbn = {0-262-16170-2}, publisher = {The MIT Press}, title = {{Affective Computing}}, year = {1997} } @article{Cappella1990, author = {Cappella, Joseph N.}, doi = {10.1207/s15327965pli0104\_5}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cappella - 1990 - On Defining Conversational Coordination and Rapport.pdf:pdf}, issn = {1047-840X}, journal = {Psychological Inquiry}, month = oct, number = {4}, pages = {303--305}, title = {{On Defining Conversational Coordination and Rapport}}, url = {http://www.tandfonline.com/doi/abs/10.1207/s15327965pli0104\_5}, volume = {1}, year = {1990} } @incollection{Brave2003, address = {New Jersey}, author = {Brave, Scott and Nass, Clifford}, booktitle = {The Human-Computer Interaction Handbook: Fundamentals, Evolving Technologies and Emerging Applications}, chapter = {4}, editor = {Jacko, Julie A.
and Sears, Andrew}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Brave, Nass - 2003 - Emotion in human–computer interaction.pdf:pdf}, number = {Cmc}, pages = {53--68}, publisher = {Lawrence Erlbaum Associates}, title = {{Emotion in human–computer interaction}}, year = {2003} } @incollection{Tomkins1984, address = {Hillsdale, NJ}, author = {Tomkins, S S}, booktitle = {Approaches to emotion}, editor = {Scherer, Klaus R and Ekman, Paul}, isbn = {0898594065}, pages = {163--195}, publisher = {Erlbaum}, title = {{Affect theory}}, volume = {163}, year = {1984} } @mastersthesis{Beutl2011, author = {Beutl, Leon}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Beutl - 2011 - A simulation for the creation of soft-looking, realistic facial expressions.pdf:pdf}, school = {University of Vienna}, title = {{A simulation for the creation of soft-looking, realistic facial expressions}}, type = {Master Thesis}, year = {2011} } @article{Bellet1991, abstract = {IN HIS RESEARCH on the physician-patient relationship, Cousins1 found that 85\% of people had changed physicians or were thinking of changing in the past 5 years. Many of those who changed did so because of their physician's poor communication skills. One of the qualities of effective communication is the use of empathy.
Because some physicians have not learned to use empathy in their training as medical students and residents, they may be ineffective in the care of patients.2 In this article, we discuss the importance of empathy in medical practice and illustrate its use with two examples.}, author = {Bellet, P S and Maloney, M J}, doi = {10.1001/jama.1991.03470130111039}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bellet, Maloney - 1991 - The importance of empathy as an interviewing skill in medicine.pdf:pdf}, issn = {0098-7484}, journal = {JAMA : the journal of the American Medical Association}, keywords = {Communication,Cost-Benefit Analysis,Empathy,Humans,Physician's Practice Patterns,Physician's Practice Patterns: economics,Physician-Patient Relations}, month = oct, number = {13}, pages = {1831--2}, pmid = {1909761}, title = {{The importance of empathy as an interviewing skill in medicine}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1909761}, volume = {266}, year = {1991} } @article{Cliffordson2002, abstract = {The purpose of the present study was to examine the structure of empathy using a hierarchical approach, and to compare the dimensions of empathy with measures of social functioning, in order to contribute to the understanding of the nature of empathy. The dimensionality of the Interpersonal Reactivity Index, which comprises four subscales (empathic concern, perspective taking, fantasy and personal distress) was examined using confirmatory factor analysis. Relations with the Social Skills Inventory were also investigated. A sample of 127 applicants for places on nursing and social work undergraduate programs participated in the study. The study findings indicate that empathy is hierarchically organized, with one general dimension at the apex. The general factor is identical to empathic concern and this dimension overlaps to a great extent with perspective taking and fantasy. 
The findings also indicate that the general dimension constitutes an integrated entirety, with its main emphasis on emotional reactivity by also involving cognitive processes.}, author = {Cliffordson, Christina}, doi = {10.1111/1467-9450.00268}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Cliffordson - 2002 - The hierarchical structure of empathy dimensional organization and relations to social functioning.pdf:pdf}, issn = {0036-5564}, journal = {Scandinavian journal of psychology}, keywords = {Empathy,Factor Analysis,Humans,Social Behavior,Statistical}, month = feb, number = {1}, pages = {49--59}, pmid = {11885760}, title = {{The hierarchical structure of empathy: dimensional organization and relations to social functioning}}, url = {http://onlinelibrary.wiley.com/doi/10.1111/1467-9450.00268/abstract}, volume = {43}, year = {2002} } @inproceedings{Neviarouskaya2007, abstract = {In this paper, we address the tasks of recognition and interpretation of affect communicated through text messaging. The evolving nature of language in online conversations is a main issue in affect sensing from this media type, since sentence parsing might fail while syntactical structure analysis. The developed Affect Analysis Model was designed to handle not only correctly written text, but also informal messages written in abbreviated or expressive manner. The proposed rule-based approach processes each sentence in sequential stages, including symbolic cue processing, detection and transformation of abbreviations, sentence parsing, and word/phrase/sentence-level analyses. In a study based on 160 sentences, the system result agrees with at least two out of three human annotators in 70\% of the cases. 
In order to reflect the detected affective information and social behaviour, an avatar was created.}, address = {Lisbon, Portugal}, author = {Neviarouskaya, Alena and Prendinger, Helmut and Ishizuka, Mitsuru}, booktitle = {2nd International Conference in Affective Computing and Intelligent Interaction (ACII)}, editor = {Paiva, Ana and Prada, R. and Picard, Rosalind W}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Neviarouskaya, Prendinger, Ishizuka - 2007 - Textual Affect Sensing for Sociable and Expressive Online Communication.pdf:pdf}, keywords = {affective sensing from text,affective user interface,avatar,emotions,language parsing and understanding,online communication,text analysis}, pages = {220--231}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Textual Affect Sensing for Sociable and Expressive Online Communication}}, year = {2007} } @inproceedings{Rodrigues2009, address = {Porto, Portugal}, author = {Rodrigues, SH and Mascarenhas, SF}, booktitle = {Affective Computing and Intelligent Interaction and Workshops (ACII'2009)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rodrigues, Mascarenhas - 2009 - “ I can feel it too !” Emergent empathic reactions between synthetic characters.pdf:pdf}, isbn = {9781424447992}, pages = {1--7}, publisher = {IEEE}, title = {{``I can feel it too!'': Emergent empathic reactions between synthetic characters}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5349570}, year = {2009} } @article{Zachariae2003, abstract = {The aim of the study was to investigate the association of physician communication behaviours as perceived by the patient with patient reported satisfaction, distress, cancer-related self-efficacy, and perceived control over the disease in cancer patients.
Questionnaires measuring distress, self-efficacy, and perceived control were completed prior to and after the consultation by 454 patients attending an oncology outpatient clinic. After the consultation, the patients also rated the physicians' communicative behaviours by completing a patient-physician relationship inventory (PPRI), and the physicians were asked to estimate patient satisfaction. The overall results showed that higher PPRI scores of physician attentiveness and empathy were associated with greater patient satisfaction, increased self-efficacy, and reduced emotional distress following the consultation. In contrast, lower PPRI scores were associated with reduced ability of the physician to estimate patient satisfaction. The results confirm and expand previous findings, suggesting that communication is a core clinical skill in oncology.}, author = {Zachariae, R and Pedersen, C G and Jensen, A B and Ehrnrooth, E and Rossen, P B and {Von Der Maase}, H}, institution = {Psychooncology Research Unit, Aarhus University Hospital, Denmark. bzach@akh.aaa.dk}, journal = {British Journal of Cancer}, keywords = {humans,neoplasms,neoplasms psychology,patient satisfaction,physician patient relations,psychological,questionnaires,self efficacy,stress}, number = {5}, pages = {658--665}, publisher = {Nature Publishing Group}, title = {{Association of perceived physician communication style with patient satisfaction, distress, cancer-related self-efficacy, and perceived control over the disease.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2376357\&tool=pmcentrez\&rendertype=abstract}, volume = {88}, year = {2003} } @inproceedings{Bickmore2007, abstract = {Interactions in which computer agents comfort users through expressed empathy have been shown to be important in alleviating user frustration and increasing user liking of the agent, and may have important healthcare applications. 
Given the current state of technology, designers of these systems are forced to choose between (a) allowing users to freely express their feelings, but having the agents provide imperfect empathic responses, or (b) greatly restricting how users can express themselves, but having the agents provide very accurate empathic feedback. This study investigates which of these options leads to better outcomes, in terms of comforting users and increasing user-agent social bonds. Results, on almost all measures, indicate that empathic accuracy is more important than user expressivity.}, address = {San Jose, California, USA}, author = {Bickmore, Timothy Wallace and Schulman, Daniel}, booktitle = {Proceedings of ACM CHI 2007 Conference on Human Factors in Computing Systems}, doi = {10.1145/1240866.1240996}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore, Schulman - 2007 - Practical approaches to comforting users with relational agents.pdf:pdf}, isbn = {9781595936424}, keywords = {affective computing,caring,comforting,embodied conversational agent,relational agent,social interface}, pages = {2291--2296}, publisher = {ACM}, title = {{Practical approaches to comforting users with relational agents}}, url = {http://dl.acm.org/citation.cfm?id=1240996}, year = {2007} } @article{Maurer1983, author = {Maurer, R. E.
and Tindall, J.H.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Maurer, Tindall - 1983 - Effect of postural congruence on client's perception of counselor empathy.pdf:pdf}, journal = {Journal of Counseling Psychology}, number = {2}, pages = {158}, publisher = {American Psychological Association}, title = {{Effect of postural congruence on client's perception of counselor empathy.}}, volume = {30}, year = {1983} } @inproceedings{DeCarolis2002, abstract = {Developing an embodied conversational agent that is able to exhibit a human-like behavior while communicating with other virtual or human agents requires enriching a typical NLG architecture. The purpose of this paper is to describe our efforts in this direction and to illustrate our approach to the generation of an Agent that intelligence shows a personality, a social and is able to react emotionally to events occurring in the environment, consistently with her goals and with the context in which the conversation takes place.}, address = {New York, USA}, author = {{De Carolis}, Berardina and Carofiglio, Valeria and Pelachaud, Catherine}, booktitle = {Proceedings of the 2nd International Conference on Natural Language Generation (INLG 2002)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/De Carolis, Carofiglio, Pelachaud - 2002 - From discourse plans to believable behavior generation.pdf:pdf}, title = {{From discourse plans to believable behavior generation}}, url = {http://www.cs.rutgers.edu/~mdstone/inlg02/154.pdf}, year = {2002} } @incollection{Davis2006, address = {New York}, author = {Davis, Mark H.}, booktitle = {Handbook of the Socialogy of Emotions}, editor = {Stets, J. and Turner, J.}, publisher = {Springer Press}, title = {{Empathy}}, year = {2006} } @article{Riek2008, abstract = {Expressing empathy is a key component of human social communication. 
One common way people convey empathy is via facial expression mirroring. It may be helpful for machines intended to interact with people to also convey empathy in this manner. We have thus created Virgil, an expression-mimicking robot. We hypothesize that if people feel like a machine is empathizing with them they will be more likely to rate the interaction positively. We conducted a pilot study to test our hypothesis, and through quantitative and qualitative analysis of our results found some support for it.}, author = {Riek, Laurel D. and Robinson, Peter}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Riek, Robinson - 2008 - Real-time empathy Facial mimicry on a robot.pdf:pdf}, journal = {ACM Workshop on Affective Interaction in Natural Environments AFFINE at the International ACM Conference on Multimodal Interfaces ICMI 08}, pages = {1--5}, publisher = {ACM}, title = {{Real-time empathy: Facial mimicry on a robot}}, year = {2008} } @article{Dunn2001, author = {Dunn, C. and Deroo, L and Rivara, FP}, journal = {Addiction}, number = {12}, pages = {1725--42}, title = {{The use of brief interventions adapted from motivational interviewing across behavioral domains: a systematic review}}, volume = {96}, year = {2001} } @inproceedings{Krauss1996, address = {San Diego, CA, US}, author = {Krauss, Robert M. 
and Chen, Yihsiu and Chawla, Purnima}, booktitle = {Advances in experimental social psychology}, doi = {10.1016/S0065-2601(08)60241-5}, editor = {Zanna, Mark P.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Krauss, Chen, Chawla - 1996 - Nonverbal behavior and nonverbal communication What do conversational hand gestures tell us.pdf:pdf}, pages = {389--450}, publisher = {Academic Press}, title = {{Nonverbal behavior and nonverbal communication: What do conversational hand gestures tell us?}}, url = {http://www.sciencedirect.com/science/article/pii/S0065260108602415}, volume = {28}, year = {1996} } @article{Meijer1989, abstract = {The present study was designed to assess the contribution of general features of gross body movements to the attribution of emotions. Eighty-five adult subjects were shown ninety-six videotaped body movements, performed by three actors. Each movement was determined by seven general dimensions: trunk movement, arm movement, vertical direction, sagittal direction, force, velocity and directness. Using rating scales, the subjects judged the compatibility of each movement with each of twelve emotion categories. The results showed which movement features predicted particular ratings. Emotion categories differed as to the amount, type, and weights of predicting movement features. Three factors were extracted from the original ratings and interpreted as Rejection-Acceptance, Withdrawal-Approach, and Preparation-Defeatedness.}, author = {Meijer, Marco}, doi = {10.1007/BF00990296}, issn = {01915886}, journal = {Journal of Nonverbal Behavior}, number = {4}, pages = {247--268}, publisher = {Springer}, title = {{The contribution of general features of body movement to the attribution of emotions}}, url = {http://www.springerlink.com/index/10.1007/BF00990296}, volume = {13}, year = {1989} } @article{Kaliouby2005, author = {Kaliouby, R. 
and Robinson, Peter}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kaliouby, Robinson - 2005 - Real-time inference of complex mental states from facial expressions and head gestures.pdf:pdf}, journal = {Real-time vision for human-computer interaction}, pages = {181--200}, publisher = {Springer}, title = {{Real-time inference of complex mental states from facial expressions and head gestures}}, url = {http://www.springerlink.com/index/K822871338R66039.pdf}, year = {2005} } @inproceedings{Ishii2010, abstract = {In face-to-face conversations, speakers are continuously checking whether the listener is engaged in the conversation by monitoring the partner’s eye-gaze behaviors. In this study, focusing on eye-gaze as information of estimating user’s conversational engagement, Wizard-of-Oz experiment first, we to collect the user’s conduct a gaze behaviors as well as the user’s subjective reports and an observer’s judgment concerning the user’s engagement in the conversation. Then, by analyzing the user’s gaze behaviors, variables and factors for estimating the user’s engagement are identified. Based on the analysis, we propose four types of engagement estimation methods based on gaze duration information and gaze transition 3- gram patterns. As the results of comparing the performance of these methods, it is revealed that a method which takes account of the individual differences in gaze transition patterns performs the best and can predict the user’s conversational engagement quite well.}, address = {Hong Kong}, author = {Ishii, Ryo and Yukiko, I. 
Nakano}, booktitle = {EGIHMI '10 Proceedings of the 2010 workshop on Eye gaze in intelligent human machine interaction}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ishii, Yukiko - 2010 - An Empirical Study of Eye-gaze Behaviors Towards the Estimation of Conversational Engagement in Human-Agent Comm.pdf:pdf}, isbn = {9781605589992}, keywords = {Empirical study,Wizard-of-Oz experiment,conversational engagement}, pages = {33--40}, publisher = {ACM}, title = {{An Empirical Study of Eye-gaze Behaviors : Towards the Estimation of Conversational Engagement in Human-Agent Communication}}, year = {2010} } @book{Fellbaum1998, address = {Cambridge, MA}, author = {Fellbaum, Christiane}, publisher = {MIT Press}, title = {{WordNet: An Electronic Lexical Database}}, year = {1998} } @inproceedings{Schulman2011, abstract = {We present a conversational agent designed as a virtual counselor for health behavior change. The incorporates techniques drawn from agent Motivational Interviewing to enhance client motivation and confidence to change; these techniques are modeled and implemented based on a domain-specific taxonomy of dialogue acts. 
We discuss the design and preliminary evaluation of the agent.}, author = {Schulman, Daniel and Bickmore, Timothy Wallace and Sidner, Candace L}, booktitle = {Association for the Advancement of Artificial Intelligence (AAAI) Spring Symposium Series}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Schulman, Bickmore, Sidner - 2011 - An Intelligent Conversational Agent for Promoting Long-Term Health Behavior Change using Motivationa.pdf:pdf}, pages = {61--64}, publisher = {Association for the Advancement of Artificial Intelligence (www.aaai.org)}, title = {{An Intelligent Conversational Agent for Promoting Long-Term Health Behavior Change using Motivational Interviewing}}, year = {2011} } @article{Bavelas1986, abstract = {Elementary motor mimicry (e.g., wincing when another is injured) has been previously considered in social psychology as the overt manifestation of some intrapersonal process such as vicarious emotion. A 2-part experiment with 50 university students tested the hypothesis that motor mimicry is instead an interpersonal event, a nonverbal communication intended to be seen by the other. Part 1 examined the effect of a receiver on the observer's motor mimicry. The victim of an apparently painful injury was either increasingly or decreasingly available for eye contact with the observer. Microanalysis showed that the pattern and timing of the observer's motor mimicry were significantly affected by the visual availability of the victim. In Part 2, naive decoders viewed and rated the reactions of these observers. Their ratings confirmed that motor mimicry was consistently decoded as "knowing" and "caring" and that these interpretations were significantly related to the experimental condition under which the reactions were elicited. 
Results cannot be explained by any alternative intrapersonal theory, so a parallel process model is proposed in which the eliciting stimulus may set off both internal reactions and communicative responses, and it is the communicative situation that determines the visable behavior. (37 ref) (PsycINFO Database Record (c) 2010 APA, all rights reserved)}, author = {Bavelas, Janet Beavin and Black, Alex and Lemery, Charles R. and Mullett, Jennifer}, doi = {10.1037/0022-3514.50.2.322}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bavelas et al. - 1986 - I show how you feel - Motor mimicry as a communicative act.pdf:pdf}, journal = {Journal of Personality and Social Psychology}, number = {2}, pages = {322--329}, title = {{"I show how you feel" - Motor mimicry as a communicative act}}, url = {http://psycnet.apa.org/journals/psp/50/2/322/}, volume = {50}, year = {1986} } @article{Foster2008, abstract = {Humans are known to use a wide range of non-verbal behaviour while speaking. Generating naturalistic embodied speech for an artificial agent is therefore an application where techniques that draw directly on recorded human motions can be helpful. We present a system that uses corpus-based selection strategies to specify the head and eyebrow motion of an animated talking head. We first describe how a domain-specific corpus of facial displays was recorded and annotated, and outline the regularities that were found in the data. We then present two different methods of selecting motions for the talking head based on the corpus data: one that chooses the majority option in all cases, and one that makes a weighted choice among all of the options. We compare these methods to each other in two ways: through cross-validation against the corpus, and by asking human judges to rate the output. 
The results of the two evaluation studies differ: the cross-validation study favoured the majority strategy, while the human judges preferred schedules gene- rated using weighted choice. The judges in the second study also showed a preference for the original corpus data over the output of either of the generation strategies.}, author = {Foster, Mary Ellen and Oberlander, Jon}, doi = {10.1007/s10579-007-9055-3}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Foster, Oberlander - 2008 - Corpus-based generation of head and eyebrow motion for an embodied conversational agent.pdf:pdf}, issn = {1574-020X}, journal = {Language Resources and Evaluation}, keywords = {Data-driven generation,Embodied conversational agents,Evaluation of generated output,Multimodal corpora}, month = feb, number = {3-4}, pages = {305--323}, title = {{Corpus-based generation of head and eyebrow motion for an embodied conversational agent}}, url = {http://www.springerlink.com/index/10.1007/s10579-007-9055-3}, volume = {41}, year = {2008} } @book{Buck1984, address = {New York}, author = {Buck, R.}, publisher = {Guilford Press}, title = {{The communication of emotion}}, year = {1984} } @inproceedings{Hlavacshelmut2012, address = {Vienna, Austria}, author = {Helmut, Hlavacs and Leon, Beutl}, booktitle = {3rd International Symposium on Facial Analysis and Animation}, publisher = {ACM}, title = {{A Simulation for the Creation of Soft-Looking Facial Expressions}}, year = {2012} } @article{Tom1991, abstract = {Reasoning that nodding head movements up and down serve a mnemesic function of positive thoughts and feelings and shaking head movements from side to side serve a mnemesic function of negative thoughts and feelings, this study determined that nodding head movements resulted in the establishment of increased preference for a neutral object, whereas shaking head movements lead to a decline in preference for the neutral object. 
The findings suggest that overt head movement is instrumental in the formation of preference and focuses attention on the importance of the somatic component of attitude.}, author = {Tom, Gail and Pettersen, Paul and Lau, Teresa and Burton, Trevor and Cook, Jim}, doi = {10.1207/s15324834basp1203\_3}, issn = {01973533}, journal = {Basic and Applied Social Psychology}, number = {3}, pages = {281--289}, publisher = {Psychology Press}, title = {{The Role of Overt Head Movement in the Formation of Affect}}, url = {http://www.informaworld.com/openurl?genre=article\&doi=10.1207/s15324834basp1203\_3\&magic=crossref}, volume = {12}, year = {1991} } @inproceedings{Stone2004, abstract = {We describe a method for using a database of recorded speech and captured motion to create an animated conversational character. People's utterances are composed of short, clearly-delimited phrases; in each phrase, gesture and speech go together meaningfully and synchronize at a common point of maximum emphasis. We develop tools for collecting and managing performance data that exploit this structure. The tools help create scripts for performers, help annotate and segment performance data, and structure specific messages for characters to use within application contexts. Our animations then reproduce this structure. They recombine motion samples with new speech samples to recreate coherent phrases, and blend segments of speech and motion together phrase-by-phrase into extended utterances. 
By framing problems for utterance generation and synthesis so that they can draw closely on a talented performance, our techniques support the rapid construction of animated characters with rich and appropriate expression.}, author = {Stone, Matthew and DeCarlo, Doug and Oh, Insuk and Rodriguez, Christian and Stere, Adrian and Lees, Alyssa and Bregler, Chris}, booktitle = {SIGGRAPH 04 ACM SIGGRAPH 2004 Papers}, doi = {10.1145/1186562.1015753}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Stone et al. - 2004 - Speaking with hands creating animated conversational characters from recordings of human performance.pdf:pdf}, issn = {07300301}, number = {3}, pages = {506--513}, publisher = {ACM Press}, title = {{Speaking with hands: creating animated conversational characters from recordings of human performance}}, url = {http://portal.acm.org/citation.cfm?doid=1015706.1015753}, volume = {23}, year = {2004} } @inproceedings{Kang2008a, author = {Kang, Sin-hwa and Gratch, Jonathan and Wang, Ning and Watt, J.}, booktitle = {Intelligent Virtual Agents}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kang et al. - 2008 - Agreeable people like agreeable virtual humans.pdf:pdf}, keywords = {evaluation,nonverbal feedback,personality,rapport,virtual agents}, pages = {253--261}, publisher = {Springer}, title = {{Agreeable people like agreeable virtual humans}}, url = {http://www.springerlink.com/index/DT61V8556710VW13.pdf}, year = {2008} } @article{Moreno2006, abstract = {College students learned about science with a multimedia program. One group (choice or C) chose to learn with or without an animated pedagogical agent (APA) representing a male or female of Wve diVerent ethnicities. Another group (no-choice or NC) was assigned an APA by the system. 
All participants in C group chose to learn with APAs and students of color chose significantly more same-ethnicity APAs than White American students. A significant interaction between choice and ethnic similarity factors revealed that group C produced lower retention, transfer, and program ratings when learning with same-ethnicity rather than different-ethnicity APAs. Results support an interference hypothesis for students who choose to learn with same-ethnicity APAs.}, author = {Moreno, Roxana and Flowerday, Terri}, doi = {10.1016/j.cedpsych.2005.05.002}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Moreno, Flowerday - 2006 - Students’ choice of animated pedagogical agents in science learning A test of the similarity-attraction hypot.pdf:pdf}, issn = {0361476X}, journal = {Contemporary Educational Psychology}, keywords = {animated pedagogical agents,affect,choice,ethnicity,gender,learning,multimedia,science,similarity-attraction}, month = apr, number = {2}, pages = {186--207}, title = {{Students’ choice of animated pedagogical agents in science learning: A test of the similarity-attraction hypothesis on gender and ethnicity}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0361476X05000317}, volume = {31}, year = {2006} } @article{Blair2003, abstract = {Human emotional expressions serve a crucial communicatory role allowing the rapid transmission of valence information from one individual to another. This paper will review the literature on the neural mechanisms necessary for this communication: both the mechanisms involved in the production of emotional expressions and those involved in the interpretation of the emotional expressions of others. Finally, reference to the neuro-psychiatric disorders of autism, psychopathy and acquired sociopathy will be made. In these conditions, the appropriate processing of emotional expressions is impaired.
In autism, it is argued that the basic response to emotional expressions remains intact but that there is impaired ability to represent the referent of the individual displaying the emotion. In psychopathy, the response to fearful and sad expressions is attenuated and this interferes with socialization resulting in an individual who fails to learn to avoid actions that result in harm to others. In acquired sociopathy, the response to angry expressions in particular is attenuated resulting in reduced regulation of social behaviour.}, author = {Blair, R J R}, issn = {09628436}, journal = {Philosophical Transactions of the Royal Society of London Series BBiological Sciences}, keywords = {amygdala,autism,autistic children,bilateral amygdala damage,communication,emotional faces,facial expressions,fusiform face area,human brain,prefrontal cortex,psychopath,selective impairment,smile production,social information,temporal visual cortex}, number = {1431}, pages = {561--572}, title = {{Facial expressions, their communicatory functions and neuro-cognitive substrates}}, volume = {358}, year = {2003} } @inproceedings{Zanbaka2007, abstract = {Do human-human social interactions carry over to human- virtual human social interactions? How does this affect future interface designers? We replicated classical tests of social influence known as the social facilitation and inhibition effects. Social facilitation/inhibition theory states that when in the presence of others, people perform simple tasks better and complex tasks worse. Participants were randomly assigned to perform both simple and complex tasks alone and in the presence of either a real human, a projected virtual human, or a virtual human in a head- mounted display. Our results showed participants were inhibited by the presence of others, whether real or virtual. That is, participants performed worse on the complex task, both in terms of percent correct and reaction times, when in the presence of others than when alone. 
Social facilitation did not occur with the real or virtual human. We discuss these results and their implications for future interface designers.}, address = {San Jose, California, USA}, author = {Zanbaka, Catherine and Ulinski, Amy and Goolkasian, Paula and Hodges, Larry F}, booktitle = {CHI 2007 Proceedings of Social Influence}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zanbaka et al. - 2007 - Social Responses to Virtual Humans Implications for Future Interface Design.pdf:pdf}, isbn = {9781595935939}, keywords = {Virtual humans,avatars,experimental studies,human-computer interaction,interface agents,social facilitation and inhibition,social influence,social psychology.}, pages = {1561--1570}, publisher = {ACM}, title = {{Social Responses to Virtual Humans : Implications for Future Interface Design}}, year = {2007} } @article{Hester2005, abstract = {Sixty-one problem drinkers were randomly assigned to either immediate treatment or a 4-week wait-list control group. Treatment consisted of a computer-based brief motivational intervention, the Drinker's Check-up (DCU). Outcomes strongly support the experimental hypotheses and long-term effectiveness of the treatment. Overall, participants reduced the quantity and frequency of drinking by 50\%, and had similar reductions in alcohol-related problems that were sustained through 12-month follow-up. 
The DCU seems to be effective in enhancing problem drinkers' motivation for change.}, author = {Hester, Reid K and Squires, Daniel D and Delaney, Harold D}, journal = {Journal of Substance Abuse Treatment}, keywords = {adult,alcohol drinking,alcoholism,blood,computer assisted,epidemiology,ethanol,female,follow up studies,humans,male,middle aged,motivation,outcome assessment (health care),patient compliance,patient dropouts,personality assessment,psychometrics,px [psychology],rh [rehabilitation],sn [statistics \&,sn [statistics \& numer,sn [statistics \& numerical,sn [statistics \& numerical data,sn [statistics \& numerical data],software,therapy,united states,waiting lists}, number = {2}, pages = {159--169}, pmid = {15780546}, publisher = {Research Division, Behavior Therapy Associates, LLP, Albuquerque, NM 87112, USA. reidhester@behaviortherapy.com}, title = {{The Drinker's Check-up: 12-month outcomes of a controlled clinical trial of a stand-alone software program for problem drinkers}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15780546}, volume = {28}, year = {2005} } @book{Ortony1988, abstract = {What causes us to experience emotions? What makes emotions vary in intensity? How are different emotions related to one another and to the language used to talk about them? What are the information processing mechanisms and structures that underlie the elicitation and intensification of emotions? Despite an abundance of psychological research on emotions, many fundamental questions like these have yet to be answered. The Cognitive Structure of Emotions addresses such questions by presenting a systematic and detailed account of the cognitive antecedents of emotions. The authors propose three aspects of the world to which people can react emotionally. People can react to events of concern to them, to the actions of those they consider responsible for such events, and to objects. 
It is argued that these three classes of reactions lead to three classes of emotions, each based on evaluations in terms of different kinds of knowledge representations. The authors characterize a wide range of emotions, offering concrete proposals about the factors that influence the intensity of each. In doing so, they forge a clear separation between emotions themselves and the language of emotion, and offer the first systematic, comprehensive, and computationally tractable account of the cognitions that underlie distinct types of human emotions.}, address = {Cambridge, UK}, author = {Ortony, A and Clore, G L and Collins, A}, booktitle = {Contemporary Sociology}, doi = {10.1016/0004-3702(92)90091-B}, isbn = {0521353645}, number = {6}, pages = {957}, publisher = {Cambridge University Press}, title = {{The Cognitive Structure of Emotions}}, volume = {18}, year = {1988} } @article{Stewart1995, abstract = {OBJECTIVE: To ascertain whether the quality of physician-patient communication makes a significant difference to patient health outcomes. DATA SOURCES: The MEDLINE database was searched for articles published from 1983 to 1993 using "physician-patient relations" as the primary medical subject heading. Several bibliographies and conference proceedings were also reviewed. STUDY SELECTION: Randomized controlled trials (RCTs) and analytic studies of physician-patient communication in which patient health was an outcome variable. DATA EXTRACTION: The following information was recorded about each study: sample size, patient characteristics, clinical setting, elements of communication assessed, patient outcomes measured, and direction and significance of any association found between aspects of communication and patient outcomes. DATA SYNTHESIS: Of the 21 studies that met the final criteria for review, 16 reported positive results, 4 reported negative (i.e., nonsignificant) results, and 1 was inconclusive. 
The quality of communication both in the history-taking segment of the visit and during discussion of the management plan was found to influence patient health outcomes. The outcomes affected were, in descending order of frequency, emotional health, symptom resolution, function, physiologic measures (i.e., blood pressure and blood sugar level) and pain control. CONCLUSIONS: Most of the studies reviewed demonstrated a correlation between effective physician-patient communication and improved patient health outcomes. The components of effective communication identified by these studies can be used as the basis both for curriculum development in medical education and for patient education programs. Future research should focus on evaluating such educational programs.}, author = {Stewart, M A}, institution = {Thames Valley Family Practice Research Unit, Centre for Studies in Family Medicine, University of Western Ontario, London.}, journal = {CMAJ Canadian Medical Association journal journal de lAssociation medicale canadienne}, number = {9}, pages = {1423--1433}, publisher = {Canadian Medical Association}, title = {{Effective physician-patient communication and health outcomes: a review.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1337906\&tool=pmcentrez\&rendertype=abstract}, volume = {152}, year = {1995} } @article{Florida2003, abstract = {In this article, we describe a new approach to enhance presence technologies. First we discuss the strong relationship between cognitive processes and emotions and how human physiology is uniquely affected when experiencing each emotion. Then we introduce our prototype Multimodal Affective User Interface. In the remaining of the paper we describe the emotion elicitation experiment we designed and conducted and the algorithms we implemented to analyze the physiological signals associated with emotions. 
These algorithms can then be used to recognize the affective states of users from physiological data collected via non-invasive technologies. The affective intelligent user interfaces we plan to create will adapt to user affect dynamically in the current context, thus providing enhanced social presenc e.}, author = {Nasoz, Fatma and Alvarez, Kaye and Lisetti, Christine L. and Finkelstein, Neal}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Nasoz et al. - 2003 - Emotion Recognition from Physiological Signals for Presence Technologies.pdf:pdf}, journal = {International Journal of Cognition, Technology, and Work - Special Issue on Presence}, number = {1}, title = {{Emotion Recognition from Physiological Signals for Presence Technologies}}, volume = {6}, year = {2003} } @article{Tartaro2008, abstract = {In this paper, we describe an intervention for children with social and communication deficits, such as autism, based on the use of a virtual peer that can engage in tightly collaborative narrative. We present a study in which children with autism engage in collaborative narrative with both a virtual and a human peer, and the use of contingent discourse is compared. Our findings suggest that contingent discourse increased over the course of interaction with a virtual peer, but not a human peer. Furthermore, topic management, such as introducing new topics or maintaining the current topic, was more likely to occur with the virtual peer than with the human peer. 
We discuss general implications of our work for understanding the role of peer interactions in learning.}, author = {Tartaro, Andrea and Cassell, Justine}, journal = {Analysis}, pages = {382--389}, publisher = {International Society of the Learning Sciences}, title = {{Playing with Virtual Peers : Bootstrapping Contingent Discourse in Children with Autism}}, url = {http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=pubmed\&cmd=Retrieve\&dopt=AbstractPlus\&list\_uids=5027430324945433305related:2TJmez4FxUUJ}, volume = {2}, year = {2008} } @article{DeRosis2006a, abstract = {We propose a theory of a-rational persuasion in which we integrate emotional and non emotional strategies by arguing that they both imply reasoning and planning abilities in the two participants. We show some examples of texts from a corpus of persuasion messages in the healthy eating domain and propose a formalism to represent this knowledge. The final goal of our research is to simulate user-adapted persuasion dialogs about healthy eating.}, author = {{De Rosis}, Fiorella and Mazzotta, Irene and Miceli, Maria and Poggi, Isabella}, doi = {10.1007/11755494\_12}, journal = {Persuasive Technology}, pages = {84--95}, publisher = {Springer}, title = {{Persuasion Artifices to Promote Wellbeing}}, url = {http://www.springerlink.com/index/u78430771uh35637.pdf}, year = {2006} } @article{Jacob2011, author = {Jacob, Pierre}, doi = {10.1007/s13164-011-0065-0}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Jacob - 2011 - The Direct-Perception Model of Empathy a Critique.pdf:pdf}, issn = {1878-5158}, journal = {Review of Philosophy and Psychology}, month = aug, number = {August}, pages = {519--540}, title = {{The Direct-Perception Model of Empathy: a Critique}}, url = {http://www.springerlink.com/index/10.1007/s13164-011-0065-0}, year = {2011} } @book{Hatfield1994a, address = {Cambridge}, author = {Hatfield, Elaine and Cacioppo, J. T. and Rapson, R. 
L.}, publisher = {Cambridge University Press}, title = {{Emotional contagion}}, year = {1994} } @article{Liu2005, author = {Liu, Zhen}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu - 2005 - An emotion model of 3D virtual characters in intelligent virtual environment.pdf:pdf}, journal = {Affective Computing and Intelligent Interaction}, title = {{An emotion model of 3D virtual characters in intelligent virtual environment}}, url = {http://www.springerlink.com/index/p232574rm3036237.pdf}, year = {2005} } @article{Busso2004, author = {Busso, Carlos and Deng, Zhigang and Yildirim, Serdar and Bulut, Murtaza}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Busso et al. - 2004 - Analysis of emotion recognition using facial expressions, speech and multimodal information.pdf:pdf}, isbn = {1581138903}, journal = {ICMI'04}, pages = {1--7}, title = {{Analysis of emotion recognition using facial expressions, speech and multimodal information}}, url = {http://dl.acm.org/citation.cfm?id=1027968}, year = {2004} } @article{Strecher1986, abstract = {The concept of self-efficacy is receiving increasing recognition as a predictor of health behavior change and maintenance. The purpose of this article is to facilitate a clearer understanding of both the concept and its relevance for health education research and practice. Self-efficacy is first defined and distinguished from other related concepts. Next, studies of the self-efficacy concept as it relates to health practices are examined. This review focuses on cigarette smoking, weight control, contraception, alcohol abuse and exercise behaviors. The studies reviewed suggest strong relationships between self-efficacy and health behavior change and maintenance. Experimental manipulations of self-efficacy suggest that efficacy can be enhanced and that this enhancement is related to subsequent health behavior change. 
The findings from these studies also suggest methods for modifying health practices. These methods diverge from many of the current, traditional methods for changing health practices. Recommendations for incorporating the enhancement of self-efficacy into health behavior change programs are made in light of the reviewed findings.}, author = {Strecher, V J and DeVellis, B M and Becker, M H and Rosenstock, I M}, journal = {Health Education Quarterly}, number = {1}, pages = {73--92}, pmid = {3957687}, publisher = {Sage Publications}, title = {{The role of self-efficacy in achieving health behavior change.}}, url = {http://heb.sagepub.com/cgi/doi/10.1177/109019818601300108}, volume = {13}, year = {1986} } @inproceedings{Zwann2012, abstract = {[1] J. M. van der Zwaan, V. Dignum, and C. M. Jonker, “A BDI Dialogue Agent for Social Support : Specification of Verbal Support Types ( Extended Abstract ) Categories and Subject Descriptors,” pp. 1183–1184.}, address = {Valencia, Spain}, author = {van der Zwaan, J.M. and Dignum, V. 
and Jonker, C.M.}, booktitle = {Proceedings of the 11th International Conference on Autonomous Agents and Multiagent Systems (AAMAS2012)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zwaan, Dignum, Jonker - 2012 - A BDI Dialogue Agent for Social Support Specification of Verbal Support Types ( Extended Abstract ) Cate.pdf:pdf}, keywords = {behavior,conversational agents,modeling cognition and socio-cultural,verbal and non-verbal expression}, pages = {1183--1184}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{A BDI Dialogue Agent for Social Support : Specification of Verbal Support Types ( Extended Abstract ) Categories and Subject Descriptors}}, year = {2012} } @article{Hojat2003, author = {Hojat, Mohammadreza and Gonnella, J S and Mangione, Salvatore and Nasca, Thomas J and Magee, Mike}, journal = {Seminars in Integrative Medicine}, publisher = {Seminars in Integrative Medicine}, title = {{Pshysician empathy in medical education practice experience with the jefferson scale of physician empathy}}, year = {2003} } @article{Wojde2005, author = {Wojdeł, A and Rothkrantz, L.J.M.}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wojdeł, Rothkrantz - 2005 - Parametric generation of facial expressions based on FACS.pdf:pdf}, journal = {Computer Graphics Forum}, keywords = {acm ccs,animation,computer graphics,data-driven modelling,facial animation,facs,fuzzy logic,i}, number = {4}, pages = {743--757}, title = {{Parametric generation of facial expressions based on FACS}}, url = {http://onlinelibrary.wiley.com/doi/10.1111/j.1467-8659.2005.00899.x/full}, volume = {24}, year = {2005} } @inproceedings{Pontier2008, abstract = {Previous research indicates that self-help therapy is an effective method to prevent and treat unipolar depression. 
While web-based self-help therapy has many advantages, there are also disadvantages to self-help therapy, such as that it misses the possibility to regard the body language of the user, and the lack of personal feedback on the user responses. This study presents a virtual agent that guides the user through the Beck Depression Inventory (BDI) questionnaire, which is used to measure the severity of depression. The agent responds empathically to the answers given by the user, by changing its facial expression. This resembles face to face therapy more than existing web-based self-help therapies. A pilot experiment indicates that the virtual agent has added value for this application.}, author = {Pontier, Matthijs and Siddiqui, Ghazanfar F}, booktitle = {Proceedings of the 8th international conference on Intelligent Virtual Agents (IVA)}, doi = {10.1007/978-3-540-85483-8\_42}, editor = {{H. Prendinger, J. Lester}, and M. Ishizuka}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pontier, Siddiqui - 2008 - A Virtual Therapist That Responds Empathically to Your Answers.pdf:pdf}, keywords = {Emotion modeling,Self-help therapy,Virtual agent}, pages = {417--425}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{A Virtual Therapist That Responds Empathically to Your Answers}}, year = {2008} } @article{Gratch2006, author = {Gratch, Jonathan and Okhmatovskaia, Anna and Lamothe, Francois}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Gratch, Okhmatovskaia, Lamothe - 2006 - Virtual rapport.pdf:pdf}, journal = {Intelligent Virtual}, title = {{Virtual rapport}}, url = {http://www.springerlink.com/index/k720537752657m81.pdf}, year = {2006} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Gray1985, abstract = {Attempts to show that the experimental psychology of the rat and the neuropsychology of the rat's brain are of relevance to clinical psychology. It is suggested that there is a false dichotomy between the behaviorist and cognitive approaches to psychology and illustrates this by going from a behaviorist analysis of a psychological concept (anxiety) to a cognitive analysis of that concept, basing the argument on brain research: Damage to the septo-hippocampal system mimics the behavioral effects of the antianxiety drugs. The reason for this mimicry is probably that these drugs reduce the noradrenergic input to the septo-hippocampal system. The noradrenergic input is normally activated under conditions of stress and serves to increase the capacity of the septo-hippocampal system to handle information. It seems probable, therefore, that the state of anxiety is, to some degree at least, mediated by activity in the septo-hippocampal system. It is emphasized that there is no dichotomy between cognitive and behaviorist psychology because the brain controls both behavior and cognition.}, author = {Gray, J. 
A}, journal = {Bulletin of the British Psychological Society}, pages = {99--112}, title = {{The whole and its parts: Behaviour, the brain, cognition and emotion}}, volume = {38}, year = {1985} } @article{Wasfy2004a, abstract = {An interrogative visualization environment is described for the interactive display and querying of large datasets. The environment combines a web-based intelligent agent facility with a visualization engine. The intelligent agent facility (IAF) incorporates a rule-based expert system for natural-language understanding, voice and text input facilities, a hierarchical clickable command list, an interface for multimodal devices such as menu-based wireless handheld devices and gesture recognition devices, and human-like avatars acting as virtual assistants. The IAF interacts with, and controls, the visualization engine through a TCP/IP network socket interface. The environment enables multiple users using a variety of interaction modes and devices to effectively browse through large amounts of data, focus on and query interesting features, and more easily comprehend and make use of the data. Application of the environment to the visualization of engineering simulations is described.}, author = {Wasfy, Hatem M. and Wasfy, Tamer M. 
and Noor, Ahmed K.}, doi = {10.1016/j.advengsoft.2004.06.015}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wasfy, Wasfy, Noor - 2004 - An interrogative visualization environment for large-scale engineering simulations.pdf:pdf}, issn = {09659978}, journal = {Advances in Engineering Software}, keywords = {expert system,intelligent software agent,interface,multimodal,natural language,visualization}, month = dec, number = {12}, pages = {805--813}, title = {{An interrogative visualization environment for large-scale engineering simulations}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0965997804001310}, volume = {35}, year = {2004} } @book{Chessick1992, address = {Northvale, N.J.}, author = {Chessick, R. D.}, publisher = {Jason Aronson}, title = {{What constitutes the patient in psychotherapy: Alternative approaches to understanding humans}}, year = {1992} } @article{Fulmer2009, author = {Fulmer, Sara M. and Frijters, Jan C.}, doi = {10.1007/s10648-009-9107-x}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Fulmer, Frijters - 2009 - A Review of Self-Report and Alternative Approaches in the Measurement of Student Motivation.pdf:pdf}, issn = {1040-726X}, journal = {Educational Psychology Review}, keywords = {1982,adolescents,and cognitive forces that,behavior,biological,children,direct,driven by a,history within educational research,however,measurement,motivation,motivation consists of the,motivation has a long,physiological,recent research has been,review,see reviews by ball,social,weiner 1992,young 1950}, month = aug, number = {3}, pages = {219--246}, title = {{A Review of Self-Report and Alternative Approaches in the Measurement of Student Motivation}}, url = {http://link.springer.com/10.1007/s10648-009-9107-x}, volume = {21}, year = {2009} } @misc{Lisetti2004, abstract = {The development of an autonomous social robot, Cherry, is occurring in tandem with studies 
gaining potential user preferences, likes, dislikes, and perceptions of her features. Thus far, results have indicated that individuals 1) believe that service robots with emotion and personality capabilities would make them more acceptable in everyday roles in human life, 2) prefer that robots communicate via both human-like facial expressions, voice, and text-based media, 3) become more positive about the idea of service and social robots after exposure to the technology, and 4) find the appearance and facial features of Cherry pleasing. The results of these studies provide the basis for future research efforts, which are discussed.}, author = {Lisetti, Christine L and Brown, S M Brown S M and Alvarez, K Alvarez K and Marpaung, A H Marpaung A H}, booktitle = {IEEE Transactions on Systems Man and Cybernetics Part C Applications and Reviews}, doi = {10.1109/TSMCC.2004.826278}, issn = {10946977}, number = {2}, pages = {195--209}, publisher = {IEEE}, title = {{A social informatics approach to human-robot interaction with a service social robot}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1291667}, volume = {34}, year = {2004} } @article{VanSwol2003, author = {{Van Swol}, Lyn M.}, doi = {10.1177/0093650203253318}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Van Swol - 2003 - The Effects of Nonverbal Mirroring on Perceived Persuasiveness, Agreement with an Imitator, and Reciprocity in a Group.pdf:pdf}, issn = {00000000}, journal = {Communication Research}, month = aug, number = {4}, pages = {461--480}, title = {{The Effects of Nonverbal Mirroring on Perceived Persuasiveness, Agreement with an Imitator, and Reciprocity in a Group Discussion}}, volume = {30}, year = {2003} } @phdthesis{Sze2005, author = {Sze, Ian}, booktitle = {Ambient Intelligence in Everyday Life}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sze - 2005 - Empathic 
computing.pdf:pdf}, school = {UNIVERSITY OF NEW SOUTH WALES}, title = {{Empathic computing}}, year = {2005} } @inproceedings{Whitehill2008, author = {Whitehill, Jacob and Bartlett, Marian and Movellan, Javier R}, booktitle = {Intelligent Tutoring Systems}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Whitehill, Bartlett, Movellan - 2008 - Measuring the perceived difficulty of a lecture using automatic facial expression recognition.pdf:pdf}, pages = {668--670}, publisher = {Springer}, title = {{Measuring the perceived difficulty of a lecture using automatic facial expression recognition}}, year = {2008} } @article{Bradley2008, abstract = {Pupil diameter was monitored during picture viewing to assess effects of hedonic valence and emotional arousal on pupillary responses. Autonomic activity (heart rate and skin conductance) was concurrently measured to determine whether pupillary changes are mediated by parasympathetic or sympathetic activation. Following an initial light reflex, pupillary changes were larger when viewing emotionally arousing pictures, regardless of whether these were pleasant or unpleasant. Pupillary changes during picture viewing covaried with skin conductance change, supporting the interpretation that sympathetic nervous system activity modulates these changes in the context of affective picture viewing. Taken together, the data provide strong support for the hypothesis that the pupil's response during affective picture viewing reflects emotional arousal associated with increased sympathetic activity.}, author = {Bradley, Margaret M and Miccoli, Laura and Escrig, Miguel A and Lang, Peter J}, institution = {Center for the Study of Emotion and Attention, Box 112766, University of Florida, Gainesville, FL 32611, USA. 
bradley@ufl.edu}, journal = {Psychophysiology}, keywords = {adolescent,adult,arousal,arousal physiology,autonomic nervous system,autonomic nervous system physiology,emotions,emotions physiology,female,galvanic skin response,galvanic skin response physiology,heart rate,heart rate physiology,humans,light,male,photic stimulation,pupil,pupil physiology}, number = {4}, pages = {602--607}, pmid = {18282202}, publisher = {Wiley Online Library}, title = {{The pupil as a measure of emotional arousal and autonomic activation.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18282202}, volume = {45}, year = {2008} } @article{Alencar2014, author = {Alencar, M\'{a}rcio and Netto, Jos\'{e} Francisco}, doi = {10.1007/978-3-662-44651-5\_14}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Alencar, Netto - 2014 - TUtor Collaborator Using Multi-Agent System.pdf:pdf}, isbn = {978-3-662-44650-8}, journal = {Collaboration Technologies and Social Computing}, number = {Communications in Computer and Information Science}, pages = {153--159}, title = {{TUtor Collaborator Using Multi-Agent System}}, volume = {460}, year = {2014} } @book{Dennett1987, author = {Dennett, D C}, booktitle = {Technology}, publisher = {MIT Press}, title = {{The Intentional Stance}}, year = {1987} } @article{Langner2010, author = {Langner, O. and Dotsch, R. and Bijlstra, G. and Wigboldus, D. H. J. and Hawk, S. T. and van Knippenberg, A.}, doi = {10.1080/02699930903485076}, journal = {Cognition and Emotion}, pages = {1377--1388}, title = {{Presentation and validation of the Radboud Faces Database}}, volume = {24}, year = {2010} } @article{Graf2002, abstract = {As we articulate speech, we usually move the head and exhibit various facial expressions. This visual aspect of speech aids understanding and helps communicating additional information, such as the speaker's mood. 
We analyze quantitatively head and facial movements that accompany speech and investigate how they relate to the text's prosodic structure. We recorded several hours of speech and measured the locations of the speakers' main facial features as well as their head poses. The text was evaluated with a prosody prediction tool, identifying phrase boundaries and pitch accents. Characteristic for most speakers are simple motion patterns that are repeatedly applied in synchrony with the main prosodic events. Direction and strength of head movements vary widely from one speaker to another, yet their timing is typically well synchronized with the spoken text. Understanding quantitatively the correlations between head movements and spoken text is important for synthesizing photo-realistic talking heads. Talking heads appear much more engaging when they exhibit realistic motion patterns.}, author = {Graf, Hans Peter and Cosatto, Eric and Strom, Volker and Huang, Fu Jie}, journal = {Proceedings of Fifth IEEE International Conference on Automatic Face Gesture Recognition}, pages = {396--401}, publisher = {IEEE Computer Society}, title = {{Visual prosody: facial movements accompanying speech}}, url = {http://ieeexplore.ieee.org/servlet/opac?punumber=7862}, year = {2002} } @article{Roesch2010, abstract = {To investigate the perception of emotional facial expressions, researchers rely on shared sets of photos or videos, most often generated by actor portrayals. The drawback of such standardized material is a lack of flexibility and controllability, as it does not allow the systematic parametric manipulation of specific features of facial expressions on the one hand, and of more general properties of the facial identity (age, ethnicity, gender) on the other. To remedy this problem, we developed FACSGen: a novel tool that allows the creation of realistic synthetic 3D facial stimuli, both static and dynamic, based on the Facial Action Coding System. 
FACSGen provides researchers with total control over facial action units, and corresponding informational cues in 3D synthetic faces. We present four studies validating both the software and the general methodology of systematically generating controlled facial expression patterns for stimulus presentation.}, author = {Roesch, Etienne B. and Tamarit, Lucas and Reveret, Lionel and Grandjean, Didier and Sander, David and Scherer, Klaus R.}, doi = {10.1007/s10919-010-0095-9}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Roesch et al. - 2010 - FACSGen A Tool to Synthesize Emotional Facial Expressions Through Systematic Manipulation of Facial Action Units.pdf:pdf}, issn = {0191-5886}, journal = {Journal of Nonverbal Behavior}, keywords = {Emotion,FACS,Facial action coding system,Facial expression,Research material,Software}, month = nov, number = {1}, pages = {1--16}, title = {{FACSGen: A Tool to Synthesize Emotional Facial Expressions Through Systematic Manipulation of Facial Action Units}}, url = {http://www.springerlink.com/index/10.1007/s10919-010-0095-9}, volume = {35}, year = {2010} } @inproceedings{Higashinaka2008, author = {Higashinaka, R. and Dohsaka, K. and Isozaki, H.}, booktitle = {Spoken Language Technology Workshop, 2008. SLT 2008. 
IEEE}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Higashinaka, Dohsaka, Isozaki - 2008 - Effects of self-disclosure and empathy in human-computer dialogue.pdf:pdf}, isbn = {9781424434725}, pages = {109--112}, publisher = {IEEE}, title = {{Effects of self-disclosure and empathy in human-computer dialogue}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4777852}, year = {2008} } @inproceedings{Read2002, address = {Eindhoven, The Netherlands}, author = {Read, JC and MacFarlane, SJ and Casey, Chris}, booktitle = {Proceedings of the International Workshop on ‘Interaction Design and Children}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Read, MacFarlane, Casey - 2002 - Endurability, engagement and expectations Measuring children's fun.pdf:pdf}, pages = {189--198}, publisher = {Shaker Publishing}, title = {{Endurability, engagement and expectations: Measuring children's fun}}, url = {http://www.researchgate.net/publication/228870976\_Endurability\_engagement\_and\_expectations\_Measuring\_children's\_fun/file/3deec518618d0828ce.pdf}, year = {2002} } @incollection{Juslin2005, abstract = {(From the chapter) The aims of this chapter are manifold. First, it is intended as a general introduction to the field for the newcomer. Thus, the chapter offers hands-on information on how to conduct studies of vocal affect expression. Second, we hope to contribute to increased cumulativeness and comparability across studies, for instance with respect to definitions, classification categories, methods, and reporting. Third, we want to highlight new developments in the field that have occurred since a previous chapter on this subject was written (Scherer 1982). There has actually been reasonable progress on several issues, and it is crucial that future research proceeds from the current state of the art. 
Fourth, we hope to encourage using the voice as a tool in testing emotion theories. Fifth, we want to offer the reader examples of applications in various practical domains that involve vocal affect expression. Finally, and perhaps most importantly, the aim of the chapter is inspirational: throughout the text, we will try to convey the enthusiasm we have for this field of study. In our attempt to achieve these aims, we have opted for a chapter structure of a somewhat unusual kind. The chapter consists of a main text, which is interspersed with boxes (background material) and modules (practical guidelines) on particular topics that we refer to in the main text. We hope this will make it easier for the reader to quickly locate relevant information. The first section offers theoretical foundations. The following two sections focus on voice cues to affect and affect inferences from voice cues. In attempting such a broad review, it is difficult to avoid simplifying many complex issues and omitting certain aspects of the topics discussed. However, throughout the chapter, we will continually provide references for further reading. 
(PsycINFO Database Record (c) 2007 APA}, author = {Juslin, Patrick N and Scherer, Klaus R.}, booktitle = {The new handbook of Methods in Nonverbal Behavior Research}, chapter = {3}, editor = {Harrigan, J A and Rosenthal, R and Scherer, K R}, isbn = {0198529619}, pages = {65--135}, publisher = {Oxford University Press}, series = {The New Handbook of Methods in Nonverbal Behavior Research}, title = {{Vocal expression of affect}}, url = {http://books.google.com/books?hl=en\&lr=\&id=RTNNfOUI\_EIC\&oi=fnd\&pg=PA65\&dq=Vocal+Expression+of+Affect\&ots=0MYYmuwEi6\&sig=npVRYBK6JtZcsjyJZbz3qjaLpuM}, year = {2005} } @article{Bates1994, author = {Bates, Joseph}, doi = {10.1145/176789.176803}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bates - 1994 - The Role of Emotion in Believable Agents.pdf:pdf;:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bates - 1994 - The Role of Emotion in Believable Agents(2).pdf:pdf}, journal = {Communications of the ACM}, keywords = {animation,art,arti cial intelligence,believable agents,believable characters,emotion}, number = {7}, pages = {122--125}, title = {{The Role of Emotion in Believable Agents}}, volume = {37}, year = {1994} } @book{Watson1930, address = {Chicago}, author = {Watson, J. B.}, publisher = {University of Chicago Press}, title = {{Behaviorism}}, year = {1930} } @article{Valitutti2005, abstract = {This paper presents resources and functionalities for the selection of affective evaluative terms. An affective hierarchy as an extension of the WordNet-Affect lexical database was developed in the first place. The second phase was the development of a semantic similarity function, acquired automatically in an unsupervised way from a large corpus of texts that allows us to put into relation concepts and emotional categories. 
The integration of the two components is a key element for several applications.}, author = {Valitutti, Alessandro and Strapparava, Carlo and Stock, Oliviero}, doi = {10.1007/11573548\_61}, journal = {Affective Computing and Intelligent Interaction}, pages = {474--481}, title = {{Lexical Resources and Semantic Similarity for Affective Evaluative Expressions Generation}}, url = {http://dx.doi.org/10.1007/11573548\_61}, year = {2005} } @article{Kang2011a, abstract = {In this paper, we describe our findings from research designed to explore the effect of self-disclosure between virtual human counselors (interviewers) and human users (interviewees) on users' social responses in counseling sessions. To investigate this subject, we designed an experiment involving three conditions of self-disclosure: high-disclosure, low-disclosure, and non-disclosure. We measured users' sense of co-presence and social attraction to virtual counselors. The results demonstrated that users reported more co-presence and social attraction to virtual humans who disclosed highly intimate information about themselves than when compared to other virtual humans who disclosed less intimate or no information about themselves. 
In addition, a further analysis of users' verbal self-disclosure showed that users revealed a medium level of personal information more often when interacting with virtual humans that highly-disclosed about themselves, than when interacting with virtual humans disclosing less intimate or no information about themselves.}, author = {Kang, Sin-Hwa and Gratch, Jonathan}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kang, Gratch - 2011 - People like virtual counselors that highly-disclose about themselves.pdf:pdf}, issn = {0926-9630}, journal = {Studies in Health Technology and Informatics}, keywords = {Adult,Computer Simulation,Counseling,Female,Humans,Interpersonal Relations,Male,Self Disclosure,User-Computer Interface}, month = jan, pages = {143--148}, pmid = {21685657}, title = {{People like virtual counselors that highly-disclose about themselves.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21685657}, volume = {167}, year = {2011} } @article{Ybarra2005, author = {Ybarra, Michele L.
and Eaton, William W.}, doi = {10.1007/s11020-005-3779-8}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ybarra, Eaton - 2005 - Internet-Based Mental Health Interventions.pdf:pdf}, issn = {1522-3434}, journal = {Mental Health Services Research}, keywords = {internet,intervention,mental health,psychology,public health}, month = jun, number = {2}, pages = {75--87}, title = {{Internet-Based Mental Health Interventions}}, url = {http://link.springer.com/10.1007/s11020-005-3779-8}, volume = {7}, year = {2005} } @incollection{Rogers1959, address = {New York}, author = {Rogers, C R}, booktitle = {Psychology: the Study of a Science}, chapter = {3}, editor = {Koch, S}, pages = {184--256}, publisher = {McGraw-Hill}, title = {{A theory of therapy, personality and interpersonal relationships as developed in the client-centered framework}}, volume = {3}, year = {1959} } @article{Vannini2010, abstract = {Bullying is widespread in European schools, despite multiple intervention strategies having been proposed over the years. The present study investigates the effects of a novel virtual learning strategy (“FearNot!”) to tackle bullying in both UK and German samples. The approach is intended primarily for victims to increase their coping skills and further to heighten empathy and defence of victims by non-involved bystanders. This paper focuses on the defender role. Applying quantitative as well as qualitative methodology, the present study found that “FearNot!” helped non-involved children to become defenders in the German sub-sample while it had no such effect in the UK sub-sample. German “New Defenders” (children who are initially uninvolved but are nominated as defenders by their peers after the intervention period) were found to be significantly more popular at baseline, and to show more cognitive empathy (Theory of Mind) for the virtual victims as compared to permanently non-involved pupils. 
Moreover, gender interacts with becoming a defender in its effects on affective empathy, with emotional contagion being particularly associated with New Defender status among girls. The findings are discussed in relation to previous research on anti-bullying intervention strategies and cultural differences in bullying prevalence rates and intervention outcomes.}, author = {Vannini, Natalie and Enz, Sibylle and Sapouna, Maria and Wolke, Dieter and Watson, Scott and Woods, Sarah and Dautenhahn, Kerstin and Hall, Lynne and Paiva, Ana and Andr\'{e}, Elizabeth and Aylett, Ruth and Schneider, Wolfgang}, doi = {10.1007/s10212-010-0035-4}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vannini et al. - 2010 - “FearNot!” a computer-based anti-bullying-programme designed to foster peer intervention.pdf:pdf}, issn = {0256-2928}, journal = {European Journal of Psychology of Education}, month = jun, number = {1}, pages = {21--44}, title = {{“FearNot!”: a computer-based anti-bullying-programme designed to foster peer intervention}}, url = {http://www.springerlink.com/index/10.1007/s10212-010-0035-4 http://dx.doi.org/10.1007/s10212-010-0035-4}, volume = {26}, year = {2010} } @inproceedings{Schiel2002, abstract = {In this contribution we announce and describe in detail the new multimodal corpus evolving from the publicly funded German SmartKom project. The first release of the corpus (BAS SK-P 1.0) has been finished end of 2001 and will be ready for distribution to the scientific community in July 2002. The SmartKom corpus will be the first of a new generation of Language Resources (LR) designed for a more or less complete data gathering of human-machine communication combining acoustic, visual and tactile input and output modalities. 
Since the funding of about EU 2 Mio for this LR is 100\% public, the corpus will be available without royalties via the Bavarian Archive for Speech Signals (BAS) at the University of Munich.}, author = {Schiel, Florian and Steininger, Silke and T\"{u}rk, Ulrich}, booktitle = {Proceedings of the 3rd conference on language resources and evaluation LREC02}, number = {34}, pages = {200--206}, title = {{The SmartKom Multimodal Corpus at BAS}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.7.9049\&rep=rep1\&type=pdf}, year = {2002} } @inproceedings{Lalmas2013, address = {Rio de Janeiro, Brazil}, author = {Lalmas, Mounia and O'Brien, Heather L. and Yom-Tov, Elad}, booktitle = {Tutorial s of the 22nd International World Wide Web Conference}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lalmas, O'Brien, Yom-Tov - 2013 - Measuring User Engagement.pdf:pdf}, title = {{Measuring User Engagement}}, year = {2013} } @inproceedings{Caridakis2006, author = {Caridakis, George and Malatesta, Lori and Kessous, Loic and Amir, Noam and Raouzaiou, Amaryllis and Karpouzis, Kostas}, booktitle = {Proceedings of the 8th international conference on Multimodal interfaces}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Caridakis et al. - 2006 - Modeling naturalistic affective states via facial and vocal expressions recognition.pdf:pdf}, pages = {146--154}, publisher = {ACM}, title = {{Modeling naturalistic affective states via facial and vocal expressions recognition}}, year = {2006} } @book{Damasio1994, abstract = {""Although I cannot tell for certain what sparked my interest in the neural underpinnings of reason, I do know when I became convinced that the traditional views on the nature of rationality could not be correct." 
Thus begins a book that takes the reader on a journey of discovery, from the story of Phineas Gage, the famous nineteenth-century case of behavioral change that followed brain damage, to the contemporary recreation of Gage's brain; and from the doubts of a young neurologist to a testable hypothesis concerning the emotions and their fundamental role in rational human behavior." "Drawing on his experiences with neurological patients affected by brain damage (his laboratory is recognized worldwide as the foremost center for the study of such patients), Antonio Damasio shows how the absence of emotion and feeling can break down rationality. In the course of explaining how emotions and feelings contribute to reason and to adaptive social behavior, Damasio also offers a novel perspective on what emotions and feelings actually are: a direct sensing of our own body states, a link between the body and its survival-oriented regulations, on the one hand, and consciousness, on the other." "Descartes' Error leads us to conclude that human organisms are endowed from the very beginning with a spirited passion for making choices, which the social mind can use to build rational behavior."-BOOK JACKET.}, address = {New York}, author = {Damasio, Antonio R}, isbn = {0399138943}, pages = {312}, publisher = {Putnam}, title = {{Descartes' Error: Emotion, Reason, and the Human Brain}}, url = {http://books.google.com/books?id=\_6gLAQAAIAAJ\&pgis=1}, year = {1994} } @book{Peterson2008, author = {Peterson, Eric T. and Carrabis, Joseph}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Peterson et al.
- 2008 - Measuring the Immeasurable Visitor Engagement.pdf:pdf}, publisher = {Web Analytics Demystified (www.webanalyticsdemystified.com)}, title = {{Measuring the Immeasurable: Visitor Engagement}}, volume = {September}, year = {2008} } @inproceedings{Cassell1994, abstract = {We describe an implemented system which automatically generates and animates conversations between multiple human-like agents with appropriate and synchronized speech, intonation, facial expressions, and hand gestures. Conversation is created by a dialogue planner that produces the text as well as the intonation of the utterances. The the text, and the intonation in turn drive facial expressions, lip motions, eye gaze, head motion, and arm gestures generators. Coordinated arm, wrist, and hand motions are invoked to create semantically meaningful gestures. Throughout we will use examples from an actual synthesized, fully animated conversation.}, author = {Cassell, Justine and Pelachaud, Catherine and Badler, Norman and Steedman, Mark and Achorn, Brett and Becket, Tripp and Douville, Brett and Prevost, Scott and Stone, Matthew}, booktitle = {Proceedings of the 21st annual conference on Computer graphics and interactive techniques}, doi = {10.1145/192161.192272}, isbn = {0897916670}, issn = {00978930}, number = {Annual Conference Series}, organization = {ACM New York, NY, USA}, pages = {413--420}, publisher = {ACM}, series = {SIGGRAPH '94}, title = {{Animated conversation: rule-based generation of facial expression, gesture \& spoken intonation for multiple conversational agents}}, url = {http://doi.acm.org/10.1145/192161.192272}, volume = {28}, year = {1994} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/The Mendeley Support Team - 2011 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @incollection{Liu2008, author = {Liu, Zhen}, booktitle = {Affective Computing, Focus on Emotion Expression, Synthesis and Recognition}, chapter = {12}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Liu - 2008 - Computational Emotion Model for Virtual Characters.pdf:pdf}, pages = {235--254}, publisher = {InTech Education and Publishing}, title = {{Computational Emotion Model for Virtual Characters}}, url = {http://www.intechopen.com/source/pdfs/5186/InTech-Computational\_emotion\_model\_for\_virtual\_characters.pdf}, year = {2008} } @article{Kiesler2008, author = {Kiesler, Sara and Powers, Aaron and Fussell, Susan R. and Torrey, Cristen}, doi = {10.1521/soco.2008.26.2.169}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kiesler et al. 
- 2008 - Anthropomorphic Interactions with a Robot and Robot–like Agent.pdf:pdf}, issn = {0278-016X}, journal = {Social Cognition}, month = apr, number = {2}, pages = {169--181}, title = {{Anthropomorphic Interactions with a Robot and Robot–like Agent}}, url = {http://guilfordjournals.com/doi/abs/10.1521/soco.2008.26.2.169}, volume = {26}, year = {2008} } @inproceedings{Yacoub2003, author = {Yacoub, Sherif and Simske, Steve and Lin, Xiaofan and Burns, John}, booktitle = {Proceedings of the 8th European Conference on Speech Communication and Technology (Eurospeech 2003)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yacoub et al. - 2003 - Recognition of emotions in interactive voice response systems.pdf:pdf}, month = sep, pages = {1--4}, title = {{Recognition of emotions in interactive voice response systems}}, url = {http://www.isca-speech.org/archive/eurospeech\_2003/e03\_0729.html}, year = {2003} } @book{Babor2001, abstract = {This manual introduces the AUDIT, the Alcohol Use Disorders Identification Test, and describes how to use it to identify persons with hazardous and harmful patterns of alcohol consumption. The AUDIT was developed by the World Health Organization (WHO) as a simple method of screening for excessive drinking and to assist in brief assessment. It can help in identifying excessive drinking as the cause of the presenting illness. It also provides a framework for intervention to help hazardous and harmful drinkers reduce or cease alcohol consumption and thereby avoid the harmful consequences of their drinking. The first edition of this manual was published in 1989 (Document No. WHO/MNH/DAT/89.4) and was subsequently updated in 1992 (WHO/PSA/92.4). Since that time it has enjoyed widespread use by both health workers and alcohol researchers. With the growing use of alcohol screening and the international popularity of the AUDIT, there was a need to revise the manual to take into account advances in research and clinical experience.
This manual is written primarily for health care practitioners, but other professionals who encounter persons with alcohol-related problems may also find it useful. It is designed to be used in conjunction with a companion document that provides complementary information about early intervention procedures, entitled “Brief Intervention for Hazardous and Harmful Drinking: A Manual for Use in Primary Care”. Together these manuals describe a comprehensive approach to screening and brief intervention for alcohol-related problems in primary health care.}, author = {Babor, Thomas F. and Higgins-Biddle, John C. and Saunders, John B. and Monteiro, Maristela G.}, edition = {2}, pages = {39}, publisher = {World Health Organization, Department of Mental Health and Substance Dependence}, title = {{AUDIT: The Alcohol Use Disorders Identification Test. Guidelines for use in primary health care}}, year = {2001} } @article{DavilaRoss2008, abstract = {Emotional contagion enables individuals to experience emotions of others. This important empathic phenomenon is closely linked to facial mimicry, where facial displays evoke the same facial expressions in social partners. In humans, facial mimicry can be voluntary or involuntary, whereby its latter mode can be processed as rapid as within or at 1s. Thus far, studies have not provided evidence of rapid involuntary facial mimicry in animals. This study assessed whether rapid involuntary facial mimicry is present in orangutans (Pongo pygmaeus; N=25) for their open-mouth faces (OMFs) during everyday dyadic play. Results clearly indicated that orangutans rapidly mimicked OMFs of their playmates within or at 1s. Our study revealed the first evidence on rapid involuntary facial mimicry in non-human mammals. 
This finding suggests that fundamental building blocks of positive emotional contagion and empathy that link to rapid involuntary facial mimicry in humans have homologues in non-human primates.}, author = {{Davila Ross}, Marina and Menzler, Susanne and Zimmermann, Elke}, institution = {Centre for the study of Emotion, Department of Psychology, University of Portsmouth, Portsmouth, Hampshire PO1 2DY, UK. marina.davila-ross@port.ac.uk}, journal = {Biology Letters}, keywords = {animal,animal physiology,animals,behavior,face,face physiology,pongo pygmaeus,pongo pygmaeus physiology,social behavior,time factors,videotape recording}, number = {1}, pages = {27--30}, publisher = {The Royal Society}, title = {{Rapid facial mimicry in orangutan play}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18077238}, volume = {4}, year = {2008} } @article{Kahn2007, abstract = {The Linguistic Inquiry and Word Count (LIWC) text analysis program often is used as a measure of emotion expression, yet the construct validity of its use for this purpose has not been examined. Three experimental studies assessed whether the LIWC counts of emotion processes words are sensitive to verbal expression of sadness and amusement. Experiment 1 determined that sad and amusing written autobiographical memories differed in LIWC emotion counts in expected ways. Experiment 2 revealed that reactions to emotionally provocative film clips designed to manipulate the momentary experience of sadness and amusement differed in LIWC counts. Experiment 3 replicated the findings of Experiment 2 and found generally weak relations between LIWC emotion counts and individual differences in emotional reactivity, dispositional expressivity, and personality. 
The LIWC therefore appears to be a valid method for measuring verbal expression of emotion.}, author = {Kahn, Jeffrey H and Tobin, Ren\'{e}e M and Massey, Audra E and Anderson, Jennifer A}, institution = {Department of Psychology, Illinois State University, Normal, IL 61790-4620, USA. jhkahn@ilstu.edu}, journal = {The American Journal of Psychology}, number = {2}, pages = {263--286}, pmid = {17650921}, publisher = {JSTOR}, title = {{Measuring emotional expression with the Linguistic Inquiry and Word Count.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17650921}, volume = {120}, year = {2007} } @article{Vogt2008, author = {Vogt, Thurid and Andr\'{e}, Elisabeth and Wagner, Johannes}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Vogt, Andr, Wagner - 2008 - Automatic Recognition of Emotions from Speech A Review of the Literature and Recommendations for Practical.pdf:pdf}, journal = {Affect and Emotion in HCI}, pages = {75--91}, title = {{Automatic Recognition of Emotions from Speech: A Review of the Literature and Recommendations for Practical Realisation}}, volume = {LNCS 4868}, year = {2008} } @inproceedings{Boukricha2007, abstract = {Addressing user’s emotions in human-computer interaction significantly enhances the believability and lifelikeness of virtual humans. Emotion recognition and interpretation is realized in our approach by integrating empathy as a designated process within the agent’s cognitive architecture.
In this paper we describe this empathy process which comprises of two interconnected components: a belief-desire-intention (BDI) based cognitive component and an affective component based on the emotion simulation system of the virtual human Max.}, address = {Osnabr\"{u}ck, Germany}, author = {Boukricha, Hana and Becker-Asano, Christian}, booktitle = {Proceedings of the 2nd Workshop at KI2007 on Emotion and Computing – Current Research and Future Impact}, editor = {{Dirk Reichardt} and Levi, Paul}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Boukricha, Becker-Asano - 2007 - Simulating empathy for the virtual human max.pdf:pdf}, pages = {23--28}, title = {{Simulating empathy for the virtual human max}}, url = {http://wwwlehre.dhbw-stuttgart.de/~reichard/itemotion/2007/}, year = {2007} } @misc{Mota2003, abstract = {This paper presents a system for recognizing naturally occurring postures and associated affective states related to a child's interest level while performing a learning task on a computer. Postures are gathered using two matrices of pressure sensors mounted on the seat and back of a chair. Subsequently, posture features are extracted using a mixture of four gaussians, and input to a 3-layer feed-forward neural network. The neural network classifies nine postures in real time and achieves an overall accuracy of 87.6\&x025; when tested with postures coming from new subjects. A set of independent Hidden Markov Models (HMMs) is used to analyze temporal patterns among these posture sequences in order to determine three categories related to a child's level of interest, as rated by human observers. 
The system reaches an overall performance of 82.3\&x025; with posture sequences coming from known subjects and 76.5\&x025; with unknown subjects.}, author = {Mota, Selene and Picard, Rosalind W.}, booktitle = {2003 Conference on Computer Vision and Pattern Recognition Workshop}, doi = {10.1109/CVPRW.2003.10047}, institution = {IEEE}, isbn = {0769519008}, issn = {10636919}, pages = {49}, publisher = {IEEE}, title = {{Automated Posture Analysis for Detecting Learner's Interest Level}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4624309}, volume = {5}, year = {2003} } @phdthesis{Bickmore2003, author = {Bickmore, Timothy Wallace}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Bickmore - 2003 - Relational Agents Effecting Change through Human-Computer Relationships by Certified by.pdf:pdf}, school = {Massachusetts Institute of Technology}, title = {{Relational Agents: Effecting Change through Human-Computer Relationships}}, type = {Doctor of Philosophy}, year = {2003} } @inproceedings{Pulman2010, abstract = {We describe a ‘How was your day?’ (HWYD) Companion whose purpose is to establish a comforting and supportive rela- tionship with a user via a conversation on a variety of work-related topics. The sys- tem has several fairly novel features aimed at increasing the naturalness of the interac- tion: a rapid ‘short loop’ response primed by the results of acoustic emotion anal- ysis, and an ‘interruption manager’, en- abling the user to interrupt lengthy or ap- parently inappropriate system responses, prompting a replanning of behaviour on the part of the system. The ‘long loop’ also takes into account the emotional state of the user, but using more conventional dialogue management and planning tech- niques.
We describe the architecture and components of the implemented prototype HWYD system.}, address = {Uppsala, Sweden}, author = {Pulman, S G and Boye, J and Cavazza, M and Smith, Cameron}, booktitle = {Proceedings of the 2010 Workshop on Companionable Dialogue Systems, ACL 2010}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Pulman et al. - 2010 - ‘ How was your day ’.pdf:pdf}, number = {July}, pages = {37--42}, publisher = {Association for Computational Linguistics}, title = {{‘ How was your day ?’}}, year = {2010} } @inproceedings{Esuli2006, abstract = {Opinion mining (OM) is a recent subdiscipline at the crossroads of information retrieval and computational linguistics which is concerned not with the topic a document is about, but with the opinion it expresses. OM has a rich set of applications, ranging from tracking users' opinions about products or about political candidates as expressed in online forums, to customer relationship management. In order to aid the extraction of opinions from text, recent research has tried to automatically determine the "PN-polarity" of subjective terms, i.e. identify whether a term that is a marker of opinionated content has a positive or a negative connotation. Research on determining whether a term is indeed a marker of opinionated content (a subjective term) or not (an objective term) has been, instead, much more scarce. In this work we describe SENTIWORDNET, a lexical resource in which each WORDNET synset s is associated to three numerical scores Obj(s), Pos(s) and Neg(s), describing how objective, positive, and negative the terms contained in the synset are. The method used to develop SENTIWORDNET is based on the quantitative analysis of the glosses associated to synsets, and on the use of the resulting vectorial term representations for semi-supervised synset classification. 
The three scores are derived by combining the results produced by a committee of eight ternary classifiers, all characterized by similar accuracy levels but different classification behaviour. SENTIWORDNET is freely available for research purposes, and is endowed with a Web-based graphical user interface.}, address = {Genoa, Italy}, author = {Esuli, Andrea and Sebastiani, Fabrizio}, booktitle = {Proceedings of the 5th Conference on Language Resources and Evaluation (LREC’06)}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Esuli, Sebastiani, Moruzzi - 2006 - SENTIWORDNET A Publicly Available Lexical Resource for Opinion Mining.pdf:pdf}, pages = {417--422}, publisher = {European Language Resources Association (ELRA)}, title = {{SENTIWORDNET: A Publicly Available Lexical Resource for Opinion Mining}}, year = {2006} } @article{Happ2011, author = {Happ, Christian and Melzer, Andr\'{e}}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Happ, Melzer - 2011 - Bringing Empathy into Play On the Effects of Empathy in Violent and Nonviolent Video Games.pdf:pdf}, journal = {Ifip International Federation For Information Processing}, keywords = {1,1 prosocial and antisocial,aggression,anderson and his colleagues,confirmed that video game,effects of video games,empathy,furthermore,in a recent overview,prosocial behavior,related to indicators of,video games,violence exposure is positively}, pages = {371--374}, title = {{Bringing Empathy into Play: On the Effects of Empathy in Violent and Nonviolent Video Games}}, url = {http://www.springerlink.com/index/P76556V1HN316RK6.pdf}, year = {2011} } @inproceedings{Arrington2011, abstract = {One of the many factors that contribute to the decline in Computer Science retention is poor performance in foundation programming courses.
In the Introduction to Programming course here at UNC Charlotte, it has been observed that poor assessment performance is often attributed to students feeling they understand material when they often don’t. This iteration of the Dr. Chestr Show seeks to overcome this disconnection by assisting in the guidance of review based on routine lecture quiz performance. The Dr. Chestr show presents users with questions about the C++ programming language based on topics covered during lecture. This paper describes the design and implementation of the Dr. Chestr virtual human and his game show environment.}, address = {New York, New York, USA}, author = {Arrington, Carl and Wilson, Dale-Marie and Lehmann, Lorrie}, booktitle = {Proceedings of the 49th Annual Southeast Regional Conference on - ACM-SE '11}, doi = {10.1145/2016039.2016127}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Arrington, Wilson, Lehmann - 2011 - Improving performance and retention in computer science courses using a virtual game show.pdf:pdf}, isbn = {9781450306867}, keywords = {Design,Human Factors.}, pages = {320}, publisher = {ACM Press}, title = {{Improving performance and retention in computer science courses using a virtual game show}}, url = {http://dl.acm.org/citation.cfm?doid=2016039.2016127}, year = {2011} } @article{Weizenbaum1966, abstract = {ELIZA is a program operating within the MAC time-sharing system at MIT which makes certain kinds of natural language conversation between man and computer possible. Input sen- tences are analyzed on the basis of decomposition rules which are triggered by key words appearing in the input text. Responses are generated by reassembly rules associated with selected decomposition rules. 
The fundamental technical prob- lems with which ELIZA is concerned are: (1) the identification of key words, (2) the discovery of minimal context, (3) the choice of appropriate transformations, (4) generation of responses in the absence of key words, and (5) the provision of an editing capability for ELIZA "scripts". A discussion of some psychologi- cal issues relevant to the ELIZA approach as well as of future developments concludes the paper.}, author = {Weizenbaum, Joseph}, doi = {10.1145/365153.365168}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Joseph Weizenbaum - 1966 - ELIZA - A Computer Program For the Study of Natural Language Communication Between Man And Machine.pdf:pdf}, issn = {0549-4974}, journal = {Communications of the ACM}, number = {1}, pages = {36--45}, title = {{ELIZA - A Computer Program For the Study of Natural Language Communication Between Man And Machine}}, url = {http://joi.jlc.jst.go.jp/JST.Journalarchive/jje1965/2.3\_1?from=CrossRef}, volume = {9}, year = {1966} } @incollection{Deng2008a, address = {London}, author = {Deng, Zhigang and Noh, Junyong}, booktitle = {Data-Driven 3D Facial Animation}, chapter = {1}, editor = {Deng, Zhigang and Neumann, Ulrich}, isbn = {978-1-84628-906-4}, pages = {1--28}, publisher = {Springer Verlag}, title = {{Computer Facial Animation: A Survey}}, year = {2008} } @article{Decety2004, abstract = {Empathy accounts for the naturally occurring subjective experience of similarity between the feelings expressed by self and others without loosing sight of whose feelings belong to whom. Empathy involves not only the affective experience of the other person's actual or inferred emotional state but also some minimal recognition and understanding of another's emotional state.
In light of multiple levels of analysis ranging from developmental psychology, social psychology, cognitive neuroscience, and clinical neuropsychology, this article proposes a model of empathy that involves parallel and distributed processing in a number of dissociable computational mechanisms. Shared neural representations, self-awareness, mental flexibility, and emotion regulation constitute the basic macrocomponents of empathy, which are underpinned by specific neural systems. This functional model may be used to make specific predictions about the various empathy deficits that can be encountered in different forms of social and neurological disorders.}, author = {Decety, Jean and Jackson, Philip L}, doi = {10.1177/1534582304267187}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Decety, Jackson - 2004 - The functional architecture of human empathy.pdf:pdf}, isbn = {1534582304267}, issn = {1534-5823}, journal = {Behavioral and cognitive neuroscience reviews}, keywords = {affective sharing,emotion regulation,executive inhibition,intersubjectivity,perspective taking,self-awareness,shared representations}, month = jun, number = {2}, pages = {71--100}, pmid = {15537986}, title = {{The functional architecture of human empathy}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15537986}, volume = {3}, year = {2004} } @article{Ramage2007, author = {Ramage, Daniel}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ramage - 2007 - Hidden Markov models fundamentals.pdf:pdf}, journal = {Lecture Notes (CS229 Section Notes)}, pages = {1--13}, title = {{Hidden Markov models fundamentals}}, url = {http://see.stanford.edu/materials/aimlcs229/cs229-hmm.pdf}, year = {2007} } @phdthesis{Sidorova2007, author = {Sidorova, Julia}, booktitle = {Speech communication}, file = {:C$\backslash$:/Users/ramin001/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sidorova - 2007 - Speech emotion 
recognition using hidden Markov models.pdf:pdf}, school = {Universitat Pompeu Fabra}, title = {{Speech emotion recognition using hidden Markov models}}, year = {2007} } @article{Ong1995, abstract = {Communication can be seen as the main ingredient in medical care. In reviewing doctor-patient communication, the following topics are addressed: (1) different purposes of medical communication; (2) analysis of doctor-patient communication; (3) specific communicative behaviors; (4) the influence of communicative behaviors on patient outcomes; and (5) concluding remarks. Three different purposes of communication are identified, namely: (a) creating a good inter-personal relationship; (b) exchanging information; and (c) making treatment-related decisions. Communication during medical encounters can be analyzed by using different interaction analysis systems (IAS). These systems differ with regard to their clinical relevance, observational strategy, reliability/validity and channels of communicative behavior. Several communicative behaviors that occur in consultations are discussed: instrumental (cure oriented) vs affective (care oriented) behavior, verbal vs non-verbal behavior, privacy behavior, high vs low controlling behavior, and medical vs everyday language vocabularies. Consequences of specific physician behaviors on certain patient outcomes, namely: satisfaction, compliance/adherence to treatment, recall and understanding of information, and health status/psychiatric morbidity are described. 
Finally, a framework relating background, process and outcome variables is presented.}, author = {Ong, L M and {De Haes}, J C and Hoos, A M and Lammes, F B}, doi = {10.1016/0277-9536(94)00155-M}, institution = {Department of Medical Psychology, Academic Medical Hospital, Amsterdam, The Netherlands.}, issn = {02779536}, journal = {Social Science \& Medicine (1982)}, keywords = {communication,humans,patient education topic,physician patient relations,treatment outcome}, number = {7}, pages = {903--918}, pmid = {7792630}, publisher = {Elsevier}, title = {{Doctor-patient communication: a review of the literature.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/7792630}, volume = {40}, year = {1995} } Duplicate of the Decety2004 entry above -- disabled by dropping the leading at-sign so BibTeX ignores it: article{Decety2004, abstract = {Empathy accounts for the naturally occurring subjective experience of similarity between the feelings expressed by self and others without loosing sight of whose feelings belong to whom. Empathy involves not only the affective experience of the other person's actual or inferred emotional state but also some minimal recognition and understanding of another's emotional state. In light of multiple levels of analysis ranging from developmental psychology, social psychology, cognitive neuroscience, and clinical neuropsychology, this article proposes a model of empathy that involves parallel and distributed processing in a number of dissociable computational mechanisms. Shared neural representations, self-awareness, mental flexibility, and emotion regulation constitute the basic macrocomponents of empathy, which are underpinned by specific neural systems.
This functional model may be used to make specific predictions about the various empathy deficits that can be encountered in different forms of social and neurological disorders.}, author = {Decety, Jean and Jackson, Philip L}, doi = {10.1177/1534582304267187}, file = {::}, isbn = {1534582304267}, issn = {1534-5823}, journal = {Behavioral and cognitive neuroscience reviews}, keywords = {affective sharing,emotion regulation,executive inhibition,intersubjectivity,perspective taking,self-awareness,shared representations}, month = jun, number = {2}, pages = {71--100}, pmid = {15537986}, title = {{The functional architecture of human empathy}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15537986}, volume = {3}, year = {2004} } @incollection{Miller1986, abstract = {The matching hypothesis proposes that clients problem-drinkers who are matched to appropriate treatments will show greater improvement than will those who are unmatched or mismatched undifferentiated treatment: the status quo research strategies predictor studies differential studies problem severity cognitive style neuropsychological status self-esteem social stability client choice (PsycINFO Database Record (c) 2006 APA, all rights reserved)}, author = {Miller, William R. and Hester, Reid K}, booktitle = {Treating addictive behaviors Processes of change}, editor = {Miller, William R and Heather, Nick}, isbn = {0306422484}, pages = {175--203}, publisher = {Plenum Press}, title = {{Matching problem drinkers with optimal treatments.}}, year = {1986} } @inproceedings{Amini2012, abstract = {In this article, we present HapFACS 1.0, a new software/API for generating static and dynamic three-dimensional facial expressions based on the Facial Action Coding System (FACS). HapFACS pro- vides total control over the FACS Action Units (AUs) activated at all levels of intensity. HapFACS allows generating faces with an individual AU or composition of AUs activated unilaterally or bilat- erally with different intensities. 
The reliable and emotionally valid facial expressions can be generated on infinite number of faces in different ethnicities, genders, and ages using HapFACS to be used in numerous scientific areas including psychology, emotion, FACS learning, clinical, and neuroscience research.}, address = {Vienna, Austria}, author = {Amini, Reza and Yasavur, U and Lisetti, Christine L}, booktitle = {Proceedings of the ACM 3rd International Symposium on Facial Analysis and Animation (FAA'12)}, file = {::}, publisher = {ACM Press}, title = {{HapFACS 1.0: Software/API for Generating FACS-Based Facial Expressions}}, url = {http://ascl.cis.fiu.edu/uploads/1/3/4/2/13423859/amini-faa-2012.pdf}, year = {2012} } @article{Prendinger2006, author = {Prendinger, Helmut and Becker-Asano, Christian}, file = {::}, journal = {International Journal of Humanoid Robotics}, keywords = {affective behavior,empathy,evaluation,life-like characters,physiological user information}, number = {3}, pages = {371--391}, title = {{A Study in Users' Physiological Response to an Empathic Interface Agent}}, volume = {3}, year = {2006} } @phdthesis{Li2007, author = {Li, Xi}, booktitle = {Interface}, file = {::}, school = {Marquette University}, title = {{SPEech Feature Toolbox (SPEFT) Design and Emotional Speech Feature Extraction}}, url = {http://speechlab.eece.mu.edu/johnson/papers/li\_thesis.pdf}, year = {2007} } @article{Orozco2010, author = {Orozco, H. and Thalmann, Daniel and Ramos, F.}, file = {::}, journal = {Proceedings of 11th Computer Graphics International, CGI}, title = {{Making empathetic virtual humans in human--computer interaction scenarios}}, url = {http://cgi2010.miralab.unige.ch/short/SP09/SP09.pdf}, volume = {10}, year = {2010} } @article{Warner1987, author = {Warner, Rebecca M.
and Malloy, Daniel and Schneider, Kathy and Knoth, Russell and Wilder, Bruce}, doi = {10.1007/BF00990958}, file = {::}, issn = {0191-5886}, journal = {Journal of Nonverbal Behavior}, number = {2}, pages = {57--74}, title = {{Rhythmic organization of social interaction and observer ratings of positive affect and involvement}}, url = {http://www.springerlink.com/index/10.1007/BF00990958}, volume = {11}, year = {1987} } @book{Apa1994, abstract = {DSM-IV}, author = {{American Psychiatric Association}}, booktitle = {W}, institution = {American Psychiatric Association}, isbn = {0890420629}, number = {VI}, pages = {xxvii, 886 p.}, pmid = {1595545}, publisher = {American Psychiatric Association}, title = {{Diagnostic and statistical manual of mental disorders: DSM-IV}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:DSM-IV:+diagnostic+and+statistical+manual+of+mental+disorders\#0}, volume = {4th}, year = {1994} } @inproceedings{Bransky2011, abstract = {To understand the role that memory plays we have collected data from three online experimental sessions in which participants interact with our virtual real-estate agent in both a recall and forget mode. We found that partial forgetting and even total loss of recall of an item, whether domain or social-based, was more believable and less frustrating than incorrect recall.}, author = {Bransky, Karla and Richards, Debbie}, booktitle = {Intelligent Virtual Agents 10th International Conference (IVA 2011)}, doi = {10.1007/978-3-642-23974-8\_49}, file = {::}, keywords = {forget-,intelligent virtual agents,memory,remembering}, pages = {433--434}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{Users' Expectations of IVA Recall and Forgetting}}, year = {2011} } @inproceedings{Boukricha2009, abstract = {a system for simulating emotional facial expressions for a virtual human has been evolved.
This system consists of two parts: (1) a control ar- chitecture for simulating emotional facial expressions with respect to Pleasure, Arousal, and Dominance (PAD) val- ues, (2) an expressive output component for animating the virtual human’s facial muscle actions called Action Units (AUs), modeled following the Facial Action Coding Sys- tem (FACS). A large face repertoire of about 6000 faces arranged in PAD-space with respect to two dominance val- ues (dominant vs. submissive) is obtained as a result of the empirical study. Using the face repertoire an approach to- wards realizing facial mimicry for a virtual human based on backward mapping AUs displaying an emotional facial expression on PAD-values is outlined.}, address = {Amsterdam}, author = {Boukricha, Hana and Wachsmuth, Ipke and Hofstatter, A. and Grammer, Karl}, booktitle = {Interaction and Workshops of 3rd International Conference on Affective Computing and Intelligent ACII2009}, doi = {10.1109/ACII.2009.5349579}, file = {::}, isbn = {9781424447992}, pages = {1--7}, publisher = {IEEE}, title = {{Pleasure-arousal-dominance driven facial expression simulation}}, url = {http://ieeexplore.ieee.org/xpl/freeabs\_all.jsp?arnumber=5349579}, year = {2009} } @article{Panksepp1982, abstract = {Emotions seem to arise ultimately from hard-wired neural circuits in the visceral-limbic brain that facilitate diverse and adaptive behavioral and physiological response to major classes of environmental challenges. Presumable these circuits developed early in mammalian brain evolution, and the underlying contro mechanisms remain similar in humans and "lower" mammals. This would suggest that theoretically guided studies of the animal brain can reveal how primitive emotions are organized in the human brain. 
Conversely, granted these cross-specis heritage, it is arguable that human introspecive access to emotional states may provide direct information concerning operations of emotive circuits and thus be a primary source of hypothese for animal brain research. In this article the possibility that emotions are elaborated by transhypothalamic executive (command) circuits that concurrently activate related behavior patterns is assessed. Current neurobehavioral evidence indicates that there are at least four executive circuits of this type - those which elaborate central states of expectancy, rage, fear, and panic. The manner in which learning and psyuchiatric disorders may arise form activities of such circuits is also discussed.}, author = {Panksepp, J}, issn = {14691825}, journal = {Behavioral and Brain Sciences}, number = {3}, pages = {407--467}, title = {{Toward a general psychobiological theory of emotions}}, url = {http://scholar.google.com.au/scholar?as\_q=Panksepp+J+\&num=10\&btnG=Search+Scholar\&as\_epq=Toward+a+general+psychobiological+theory+of\&as\_oq=\&as\_eq=\&as\_occt=any\&as\_sauthors=\&as\_publication=\&as\_ylo=1982\&as\_yhi=1982\&as\_sdt=1.\&as\_sdtp=on\&as\_sdts=5\&hl=en\#0}, volume = {5}, year = {1982} } @inproceedings{Kashyap2012, abstract = {Earlier works on personalized Web search focused on the click- through graphs, while recent works leverage social annotations, which are often unavailable. On the other hand, many users are members of the social networks and subscribe to social groups. Intuitively, users in the same group may have similar relevance judgments for queries related to these groups. SonetRank utilizes this observation to personalize the Web search results based on the aggregate relevance feedback of the users in similar groups. SonetRank builds and maintains a rich graph-based model, termed Social Aware Search Graph, consisting of groups, users, queries and results click-through information. 
SonetRank’s personalization scheme learns in a principled way to leverage the following three signals, of decreasing strength: the personal document preferences of the user, of the users of her social groups relevant to the query, and of the other users in the network. SonetRank also uses a novel approach to measure the amount of personalization with respect to a user and a query, based on the query-specific richness of the user’s social profile. We evaluate SonetRank with users on Amazon Mechanical Turk and show a significant improvement in ranking compared to state-of-the-art techniques.}, address = {Maui, HI, USA}, author = {Kashyap, Abhijith and Amini, Reza and Hristidis, Vagelis}, booktitle = {ACM 21st Conference on Information and Knowledge Management CIKM 2012}, file = {::}, isbn = {9781450311564}, keywords = {Results Re-ranking.,Search Personalization,Social Search}, publisher = {ACM}, title = {{SonetRank : Leveraging Social Networks to Personalize Search}}, year = {2012} } @article{Wu2008, author = {Wu, Siew-Rong}, doi = {10.1109/DIGITEL.2008.27}, file = {::}, isbn = {978-0-7695-3409-1}, journal = {2008 Second IEEE International Conference on Digital Game and Intelligent Toy Enhanced Learning}, pages = {213--214}, publisher = {Ieee}, title = {{Humor and Empathy: Developing Students' Empathy through Teaching Robots to Tell English Jokes}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4700764}, year = {2008} } @article{Fellner2012, abstract = {Individuals may differ in their ability to learn the significance of emotional cues within a specific context. If so, trait emotional intelligence (EI) may be associated with faster cue learning. This study (N = 180) tested whether trait EI predicts faster learning of a critical cue for discriminating ‘‘terrorists’’ from ‘‘non-terrorists’’, using virtual-reality heads as stimuli. The critical cue was either facial emotion (positive or negative), or a neutral feature (hat size). 
Cognitive ability and subjective state were also assessed. Par- ticipants were faster to learn with an emotive cue. Surprisingly, high trait EI was correlated with poorer performance, especially early in learning. Subjective distress was also associated with impaired learning to emotive cues. }, author = {Fellner, Angela N. and Matthews, Gerald and Shockley, Kevin D. and Warm, Joel S. and Zeidner, Moshe and Karlov, Lisa and Roberts, Richard D.}, doi = {10.1016/j.jrp.2012.01.004}, file = {::}, issn = {00926566}, journal = {Journal of Research in Personality}, keywords = {trait emotional intelligence}, month = jun, number = {3}, pages = {239--247}, publisher = {Elsevier Inc.}, title = {{Using emotional cues in a discrimination learning task: Effects of trait emotional intelligence and affective state}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0092656612000050}, volume = {46}, year = {2012} } @article{Boukricha2011, author = {Boukricha, Hana and Wachsmuth, Ipke}, doi = {10.1007/s13218-011-0109-8}, file = {::}, issn = {0933-1875}, journal = {KI - K\"{u}nstliche Intelligenz}, keywords = {agent-agent interaction,empathic virtual humans,human-agent,internal simulation}, month = may, number = {3}, pages = {195--204}, title = {{Empathy-Based Emotional Alignment for a Virtual Human: A Three-Step Approach}}, url = {http://www.springerlink.com/index/10.1007/s13218-011-0109-8}, volume = {25}, year = {2011} } @inproceedings{Pontier2009, abstract = {There is a growing belief that the environment plays an important role in the healing process of patients, supported by empirical findings. Previous research showed that psychological stress caused by loneliness can be reduced by artificial companions. As a pilot application for this purpose, this paper presents an affective agent playing tic-tac-toe with the user. 
Experimenting with a number of agents under different parameter settings shows the agent is able to show human-like emotional behavior, and can make decisions based on rationality as well as on affective influences. After discussing the application with clinical experts and making improvements where needed, the application can be tested in a clinical setting in future research.}, author = {Pontier, Matthijs and Siddiqui, Ghazanfar Farooq}, booktitle = {PRIMA '09 Proceedings of the 12th International Conference on Principles of Practice in Multi-Agent Systems}, editor = {Yang, Jung-Jin and others}, file = {::}, keywords = {cognitive modeling,emotion modeling,healing environment}, pages = {33--47}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{An Affective Agent Playing Tic-Tac-Toe as Part of a Healing Environment}}, year = {2009} } @inproceedings{Cavazza2010, abstract = {This paper presents a dialogue system in the form of an ECA that acts as a sociable and emotionally intelligent companion for the user. The system dialogue is not task-driven but is social conversation in which the user talks about his/her day at the office. During conversations the system monitors the emotional state of the user and uses that information to inform its dialogue turns. The system is able to respond to spoken interruptions by the user, for example, the user can interrupt to correct the system.
The system is already fully implemented and aspects of actual output will be used to illustrate.}, address = {The University of Tokyo}, author = {Cavazza, Marc and Vargas, C Emilio and Gil, Jos\'{e} Rela\~{n}o and Telef\'{o}nica, I D and Crook, Nigel and Field, Debora and Sheffield, S}, booktitle = {Proceedings of SIGDIAL 2010: the 11th Annual Meeting of the Special Interest Group on Discourse and Dialogue}, pages = {277--280}, publisher = {Association for Computational Linguistics}, title = {{‘ How was your day ?’ An affective companion ECA prototype}}, volume = {1}, year = {2010} } @incollection{Burke2002, address = {New-York,NY}, author = {Burke, B. L. and Arkowitz, H. and Dunn, C.}, booktitle = {Motivational Interviewing: Preparing People for Change}, edition = {2nd}, pages = {217--250}, publisher = {Guilford Press}, title = {{The Efficacy of Motivational Interviewing and Its Adaptation}}, year = {2002} } @inproceedings{Nguyen2009c, author = {Nguyen, H. and Masthoff, Judith}, booktitle = {Proceedings of the 4th International Conference on Persuasive Technology}, file = {::}, isbn = {9781605583761}, keywords = {affective computing,design,experimentation,human factors}, pages = {7}, publisher = {ACM}, title = {{Designing empathic computers: the effect of multimodal empathic feedback using animated agent}}, url = {http://dl.acm.org/citation.cfm?id=1541958}, year = {2009} } @article{Paiva2005a, author = {Paiva, Ana and Dias, Jo\~{a}o and Sobral, Daniel and Aylett, Ruth and Woods, Sarah and Hall, Lynne and Zoll, Carsten}, doi = {10.1080/08839510590910165}, issn = {0883-9514}, journal = {Applied Artificial Intelligence}, month = mar, number = {3-4}, pages = {235--266}, title = {{Learning By Feeling: Evoking Empathy With Synthetic Characters}}, url = {http://www.tandfonline.com/doi/abs/10.1080/08839510590910165}, volume = {19}, year = {2005} } @article{Jacob2011, author = {Jacob, Pierre}, doi = {10.1007/s13164-011-0065-0}, file = {::}, issn = {1878-5158}, journal = 
{Review of Philosophy and Psychology}, month = aug, number = {August}, pages = {519--540}, title = {{The Direct-Perception Model of Empathy: a Critique}}, url = {http://www.springerlink.com/index/10.1007/s13164-011-0065-0}, year = {2011} } @incollection{Catrambone2004, author = {Catrambone, Richard and Stasko, John and Xiao, Jun}, booktitle = {From Brows to Trust: Evaluating Embodied Conversational Agents}, chapter = {9}, editor = {Ruttkay, Zs\'{o}fia and Pelachaud, Catherine}, isbn = {1-4020-2730-3}, keywords = {embodied conversational agent,evaluation,research framework,task}, pages = {239--267}, publisher = {Kluwer Academic Publishers}, title = {{ECA as User Interface Paradigm: Experimental Findings within a Framework for Research}}, year = {2004} } @article{Brave2005, abstract = {Embodied computer agents are becoming an increasingly popular human-computer interaction technique. Often, these agents are programmed with the capacity for emotional expression. This paper investigates the psychological effects of emotion in agents upon users. In particular, two types of emotion were evaluated: self-oriented emotion and other-oriented, empathic emotion. In a 2 (self-oriented emotion: absent vs. present) by 2 (empathic emotion: absent vs. present) by 2 (gender dyad: male vs. female) between-subjects experiment (N = 96), empathic emotion was found to lead to more positive ratings of the agent by users, including greater likeability and trustworthiness, as well as greater perceived caring and felt support. No such effect was found for the presence of self-oriented emotion. 
Implications for the design of embodied computer agents are discussed and directions for future research suggested.}, author = {Brave, Scott and Nass, Clifford and Hutchinson, Kevin}, doi = {10.1016/j.ijhcs.2004.11.002}, issn = {10715819}, journal = {International Journal of Human-Computer Studies - Special issue: Subtle expressivity for characters and robots}, keywords = {affective computing,characters,embodied agents,emotion,empathy,social interfaces}, month = feb, number = {2}, pages = {161--178}, title = {{Computers that care: investigating the effects of orientation of emotion exhibited by an embodied computer agent}}, url = {http://www.sciencedirect.com/science/article/pii/S1071581904001284}, volume = {62}, year = {2005} } @article{Lakin2003, abstract = {The “chameleon effect” refers to the tendency to adopt the postures, gestures, and mannerisms of interaction partners (Chartrand \& Bargh, 1999). This type of mimicry occurs outside of conscious awareness, and without any intent to mimic or imitate. Empirical evidence suggests a bi-directional relationship between nonconscious mimicry on the one hand, and liking, rapport, and affiliation on the other. That is, nonconscious mimicry creates affiliation, and affiliation can be ex- pressed through nonconscious mimicry. We argue that mimicry played an impor- tant role in human evolution. Initially, mimicry may have had survival value by helping humans communicate. We propose that the purpose of mimicry has now evolved to serve a social function. Nonconscious behavioral mimicry increases af- filiation, which serves to foster relationships with others. We review current re- search in light of this proposed framework and suggest future areas of research.}, author = {Lakin, J. L. 
and Jefferis, VE and Cheng, CM}, file = {::}, journal = {Journal of nonverbal Behavior}, keywords = {affiliation,chameleon effect,human evolution,mimicry}, number = {3}, pages = {145--162}, title = {{The chameleon effect as social glue: Evidence for the evolutionary significance of nonconscious mimicry}}, volume = {27}, year = {2003} } @inproceedings{Lisetti2008a, author = {Lisetti, Christine L and Wagner, Eric}, booktitle = {Proceedings of the AAAI Spring Symposium on Emotion, Personality and Social Behavior}, file = {::}, keywords = {Technical Report SS-08-04}, title = {{Mental Health Promotion with Animated Characters : Exploring Issues and Potential}}, year = {2008} } @article{Devoldre2010, abstract = {Social support researchers and clinicians have repeatedly expressed the need to identify the antecedents of social support provision within close relationships. The aim of the present study is to investigate the extent to which individual differences in cognitive empathy (perspective taking) and affective empathy (empathic concern and personal distress) are predictive of social support provision in couples. Study 1 involved 83 female participants in a relatively young relationship; Study 2 involved 128 married couples. The authors used self-report measures in both studies to assess individual differences in empathy and participants' support provision behaviors. The main findings suggest a significant contribution of the different components of empathy with rather different pictures for each of these components. 
The authors discuss the present findings in light of existing theory and research on social support in relationships.}, author = {Devoldre, Inge and Davis, Mark H and Verhofstadt, Lesley L and Buysse, Ann}, doi = {10.1080/00223981003648294}, file = {::}, issn = {0022-3980}, journal = {The Journal of psychology}, keywords = {80 and over,Adolescent,Adult,Affect,Aged,Empathy,Family Characteristics,Female,Humans,Individuality,Male,Middle Aged,Personal Construct Theory,Personality Inventory,Personality Inventory: statistics \& numerical data,Psychometrics,Social Support,Young Adult}, number = {3}, pages = {259--284}, pmid = {20461931}, title = {{Empathy and social support provision in couples: social support and the need to study the underlying processes.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21506454}, volume = {144}, year = {2010} } @article{Cai2006, abstract = {Empathic computing is an emergent paradigm that enables a system to understand human states and feelings and to share this intimate information. The new paradigm is made possible by the convergence of affordable sensors, embedded processors and wireless ad-hoc networks. The power law for multi-resolution channels and mobile-stationary sensor webs is introduced to resolve the information avalanche problems. As empathic computing is sensor-rich computing, particular models such as semantic differential expressions and inverse physics are discussed. A case study of a wearable sensor network for detection of a falling event is presented. It is found that the location of the wearable sensor is sensitive to the results. From the machine learning algorithm, the accuracy reaches up to 90\% from 21 simulated trials. Empathic computing is not limited to healthcare. 
It can also be applied to solve other everyday-life problems such as management of emails and stress.}, author = {Cai, Yang}, doi = {10.1007/11825890\_3}, file = {::}, journal = {Ambient Intelligence in Everyday Life, Lecture Notes in Computer Science}, pages = {67--85}, publisher = {Springer}, title = {{Empathic computing}}, url = {http://www.springerlink.com/index/l482m128476w5043.pdf}, volume = {3864/2006}, year = {2006} } @article{McClave2000, abstract = {Speaker head movements pattern predictably and have semantic, discourse, and communicative functions. Some head movements convey propositional content, while others carry semantic meanings beyond affirmation and negation. Side-to-side shakes correlate with expressions of inclusivity and intensification. Lateral movements also co-occur with uncertain statements and lexical repairs. In narration, head movements serve to locate referents in abstract space. A change in head posture marks switches between direct and indirect discourse, and speaker head nods function as backchannel requests to which listeners are extraordinarily sensitive. 
These findings are based on the microanalysis of videotaped conversations between native speakers of American English.}, author = {McClave, Evelyn Z}, doi = {10.1016/S0378-2166(99)00079-X}, issn = {03782166}, journal = {Journal of Pragmatics}, keywords = {ameri,backchannel,gesture,head movements,kinesic,nonverbal,speech}, number = {7}, pages = {855--878}, publisher = {Elsevier}, title = {{Linguistic functions of head movements in the context of speech}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S037821669900079X}, volume = {32}, year = {2000} } @article{Rabiner1989, author = {Rabiner, Lawrence R.}, file = {::}, journal = {Proceedings of the IEEE}, number = {2}, pages = {257--286}, title = {{A tutorial on hidden Markov models and selected applications in speech recognition}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=18626}, volume = {77}, year = {1989} } @article{Ortony1990, abstract = {A widespread assumption in theories of emotion is that there exists a small set of basic emotions. From a biological perspective, this idea is manifested in the belief that there might be neurophysiological and anatomical substrates corresponding to the basic emotions. From a psychological perspective, basic emotions are often held to be the primitive building blocks of other, nonbasic emotions. The content of such claims is examined, and the results suggest that there is no coherent nontrivial notion of basic emotions as the elementary psychological primitives in terms of which other emotions can be explained. Thus, the view that there exist basic emotions out of which all other emotions are built, and in terms of which they can be explained, is questioned, raising the possibility that this position is an article of faith rather than an empirically or theoretically defensible basis for the conduct of emotion research. This suggests that perhaps the notion of basic emotions will not lead to significant progress in the field. 
An alternative approach to explaining the phenomena that appear to motivate the postulation of basic emotions is presented.}, author = {Ortony, A and Turner, T J}, institution = {Institute for the Learning Sciences, Northwestern University, Evanston, Illinois 60201.}, journal = {Psychological Review}, number = {3}, pages = {315--331}, pmid = {1669960}, publisher = {Citeseer}, title = {{What's basic about basic emotions?}}, url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/0033-295X.97.3.315}, volume = {97}, year = {1990} } @incollection{Mora1999, abstract = {Beliefs-Desires-Intentions models (or BDI models) of agents have been around for quit a long time. The purpose of these models is to characterize agents using anthropomorphic notions, such as mental states and actions. How- ever, despite the fact that many systems have been developed based on these mod- els, it is a general concern that there is a gap between those powerful BDI logics and practical systems. The purpose of this paper is to present a BDI model that, besides being a formal model of agents, is also suitable to be used to implement agents. 
Instead of defining a new BDI logic or choosing an existing one, and ex- tending it with an operational model, we define the notions of belief, desires and intentions using a logic formalism that is both well-defined and computational.}, author = {Mora, M and Lopes, J and Viccariz, R and Coelho, H}, booktitle = {Intelligent Agents V: Agents Theories, Architectures, and Languages}, chapter = {Section I}, doi = {10.1007/3-540-49057-4\_2}, editor = {Muller, J.P.}, file = {::}, keywords = {BDI models,agent architectures,logic program- ming.,mental states modeling}, pages = {11--27}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{BDI models and systems: Reducing the gap}}, url = {http://www.springerlink.com/index/m674631247x60251.pdf}, year = {1999} } @article{Bryant1982, abstract = {56 1st, 115 4th, and 87 7th graders were administered a newly devised index of empathy partly based on A. Mehrabian and N. Epstein's (see record 1973-23075-001) measure. Item means, item-total correlations, testretest reliabilities, correlations of empathy with aggressiveness and acceptance of individual differences, and correlations with other existing measures of empathy as well as to social desirability response set and reading achievement formed the basis of internal, discriminant, convergent, and general construct validation. The measure demonstrated satisfactory reliability and preliminary construct validity. The study of developmental aspects of empathic arousal toward peers of different sexes is indicated. 
(38 ref) (PsycINFO Database Record (c) 2010 APA, all rights reserved)}, author = {Bryant, Brenda K}, doi = {10.2307/1128984}, issn = {00093920}, journal = {Child Development}, number = {2}, pages = {413--425}, publisher = {Blackwell Publishing on behalf of the Society for Research in Child Development}, title = {{An Index of Empathy for Children and Adolescents}}, volume = {53}, year = {1982} } @article{Miller2003, author = {Miller, WR and Moyers, TB and Ernst, Denise}, file = {::}, journal = {World Wide Web Published Online}, keywords = {MISC 2.0}, title = {{Manual for the motivational interviewing skill code (MISC)}}, url = {http://casaa.unm.edu/tandc.html}, year = {2003} } @article{Scherer2007, abstract = {In earlier work, the authors analyzed emotion portrayals by professional actors separately for facial expression, vocal expression, gestures, and body movements. In a secondary analysis of the combined data set for all these modalities, the authors now examine to what extent actors use prototypical multimodal configurations of expressive actions to portray different emotions, as predicted by basic emotion theories claiming that expressions are produced by fixed neuromotor affect programs. Although several coherent unimodal clusters are identified, the results show only 3 multimodal clusters: agitation, resignation, and joyful surprise, with only the latter being specific to a particular emotion. 
Finding variable expressions rather than prototypical patterns seems consistent with the notion that emotional expression is differentially driven by the results of sequential appraisal checks, as postulated by componential appraisal theories.}, author = {Scherer, Klaus R and Ellgring, Heiner}, doi = {10.1037/1528-3542.7.1.158}, file = {::}, issn = {1528-3542}, journal = {Emotion (Washington, D.C.)}, keywords = {Adult,Affect,Facial Expression,Female,Gestures,Humans,Judgment,Male,Psychomotor Performance,Speech Acoustics,Voice}, month = feb, number = {1}, pages = {158--171}, pmid = {17352571}, title = {{Multimodal expression of emotion: affect programs or componential appraisal patterns?}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17352571}, volume = {7}, year = {2007} } @book{Hoffman2000, author = {Hoffman, Martin L.}, booktitle = {Development}, isbn = {052158034X}, pages = {2}, publisher = {Cambridge University Press}, title = {{Empathy and Moral Development: Implications for Caring and Justice}}, year = {2000} } @inproceedings{Lisetti2008, abstract = {In this article, we explore how Embodied Conversational Agents (ECAs) or avatars could be used as social orthotics defined as therapeutic computer- based social companions aimed at promoting healthy behaviors. We review some of the latest related progress and identify specific features of ECAs that are important – if not necessary – to include in the design of social orthotic systems.}, author = {Lisetti, Christine L}, booktitle = {Proceedings of the CHI 2008 Conference Workshop on Technology in Mental Health}, file = {::}, keywords = {affective computing,agents,avatars,embodied conversational,psychotherapy,social orthotics}, pages = {1--12}, publisher = {ACM}, title = {{Embodied Conversational Agents for Psychotherapy}}, year = {2008} } @inproceedings{Boukricha2007, abstract = {Addressing user’s emotions in human-computer interaction significantly enhances the believability and lifelikeness of virtual humans. 
Emotion recognition and interpretation is realized in our approach by integrating empathy as a designated process within the agent’s cognitive architecture. In this paper we describe this empathy process which comprises of two interconnected components: a belief-desire-intention (BDI) based cognitive component and an affective component based on the emotion simulation system of the virtual human Max.}, address = {Osnabr\"{u}ck, Germany}, author = {Boukricha, Hana and Becker-Asano, Christian}, booktitle = {Proceedings of the 2nd Workshop at KI2007 on Emotion and Computing – Current Research and Future Impact}, editor = {{Dirk Reichardt} and Levi, Paul}, file = {::}, pages = {23--28}, title = {{Simulating empathy for the virtual human max}}, url = {http://wwwlehre.dhbw-stuttgart.de/~reichard/itemotion/2007/}, year = {2007} } @article{Breazeal2005, author = {Breazeal, Cynthia and Buchsbaum, Daphna and Gray, Jesse and Gatenby, David and Blumberg, Bruce}, file = {::}, journal = {Artificial Life}, number = {1-2}, pages = {31--62}, publisher = {MIT Press}, title = {{Learning from and about others: Towards using imitation to bootstrap the social understanding of others by robots}}, volume = {11}, year = {2005} } @article{Szymanski2007, abstract = {The first step towards creating avatars with human-like artificial minds is to give them human-like memory structures with an access to general knowledge about the world. This type of knowledge is stored in semantic memory. Although many approaches to modeling of semantic memories have been proposed they are not very useful in real life applications because they lack knowledge comparable to the common sense that humans have, and they cannot be implemented in a computationally efficient way. 
The most drastic simplification of semantic memory leading to the simplest knowledge representation that is sufficient for many applications is based on the Concept Description Vectors (CDVs) that store, for each concept, an information whether a given property is applicable to this concept or not. Unfortunately even such simple information about real objects or concepts is not available. Experiments with automatic creation of concept description vectors from various sources, including ontologies, dictionaries, encyclopedias and unstructured text sources are described. Haptek-based talking head that has an access to this memory has been created as an example of a humanized interface (HIT) that can interact with web pages and exchange information in a natural way. A few examples of applications of an avatar with semantic memory are given, including the twenty questions game and automatic creation of word puzzles.}, author = {Szymanski, Julian and Sarnatowicz, Tomasz and Duch, Wlodzislaw}, file = {::}, journal = {Journal of Ubiquitous Computing and Intelligence}, keywords = {avatars,cyberspace,dialogue systems,natural language processing,semantic memory,word games}, title = {{Towards Avatars with Artificial Minds : Role of Semantic Memory}}, url = {http://cogprints.org/5357/}, year = {2007} } @article{Russell1980, abstract = {Factor-analytic evidence has led most psychologists to describe affect as a set of dimensions, such as displeasure, distress, depression, excitement, and so on, with each dimension varying independently of the others. However, there is other evidence that rather than being independent, these affective dimensions are interrelated in a highly systematic fashion. 
The evidence suggests that these interrelationships can be represented by a spatial model in which affective concepts fall in a circle in the following order: pleasure (0), excitement (45), arousal (90), distress (135), displeasure (180), depression (225), sleepiness (270), and relaxation (315). This model was offered both as a way psychologists can represent the structure of affective experience, as assessed through self-report, and as a representation of the cognitive structure that laymen utilize in conceptualizing affect. Supportive evidence was obtained by scaling 28 emotion-denoting adjectives in 4 different ways: R. T. Ross's (1938) technique for a circular ordering of variables, a multidimensional scaling procedure based on perceived similarity among the terms, a unidimensional scaling on hypothesized pleasure–displeasure and degree-of-arousal dimensions, and a principal-components analysis of 343 Ss' self-reports of their current affective states. (70 ref) (PsycINFO Database Record (c) 2010 APA, all rights reserved)}, author = {Russell, James A}, doi = {10.1037/h0077714}, file = {::}, journal = {Journal of Personality and Social Psychology}, number = {6}, pages = {1161--1178}, title = {{A circumplex model of affect}}, url = {http://psycnet.apa.org/psycinfo/1981-25062-001}, volume = {39}, year = {1980} } @incollection{Cooper2000, abstract = {This paper considers how research into empathy in teaching and learning can inform the research into intelligent systems and intelligent agents embedded in educational applications. It also relates this research to some analysis of classroom practice completed as part of the EU funded NIMIS project. The project is developing three applications, one of which aims to support writing development with young children aged 5-6 years based on a cartoon format. 
The NIMIS classroom as a whole is designed to enhance and augment existing classroom practices and to foster collaboration by non-intrusive hardware and intuitive hardware and software interfaces. To this end it seeks to enhance both human and electronic communication in the classroom. Empathy is central to ensuring the quality of human communication and personal development. This paper suggests that intelligent systems that can consider more carefully the processes and feelings involved in human interactions in teaching and learning, may promote higher quality support for students in classrooms.}, author = {Cooper, Bridget and Brna, Paul and Martins, Alex}, booktitle = {Affective Interactions Towards a New Generation of Computer Interfaces}, doi = {10.1007/10720296\_3}, editor = {Paiva, Ana}, file = {::}, isbn = {978-3-540-41520-6}, pages = {21--34}, publisher = {Springer Berlin / Heidelberg}, title = {{Effective affective in intelligent systems–building on evidence of empathy in teaching and learning}}, url = {http://www.springerlink.com/index/j8v0l230t3503367.pdf}, volume = {1814/2000}, year = {2000} } @article{Kuntsche2006, abstract = {The aim was to review the empirical research carried out over the last 15 years on the characteristics of young people (10- to 25-year olds) who have specific motives for drinking. In a computer-assisted search of relevant literature, 82 studies were identified. Concerning demographic factors, a developmental trend was found - from general, undifferentiated drinking motives in late childhood and early adolescence to more gender-specific drinking motives in subsequent years. With regard to personality factors, two specific patterns can be distinguished: extraversion and sensation-seeking correlate with enhancement motives, while neuroticism and anxiety correlate most strongly with coping motives. For contextual factors, drinking motives were found to vary across countries but not among different ethnic groups in the same culture. 
Based on these results, preventive strategies should take into account general, undifferentiated drinking motivation in late childhood, and social and enhancement motives in adolescence, particularly among boys. Findings on personality indicate that it would be useful to focus on extraverted, sensation-seeking boys who drink for enhancement motives and neurotic, anxious girls who drink for coping motives.}, author = {Kuntsche, Emmanuel and Knibbe, Ronald and Gmel, Gerhard and Engels, Rutger}, doi = {10.1016/j.addbeh.2005.12.028}, file = {::}, issn = {0306-4603}, journal = {Addictive behaviors}, keywords = {Adaptation, Psychological,Adolescent,Adult,Age Factors,Alcohol Drinking,Alcohol Drinking: psychology,Anxiety,Anxiety: etiology,Child,Culture,Humans,Motivation,Personality,Sex Factors}, month = oct, number = {10}, pages = {1844--1857}, pmid = {16460883}, title = {{Who drinks and why? A review of socio-demographic, personality, and contextual issues behind the drinking motives in young people.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16460883}, volume = {31}, year = {2006} } @inproceedings{Gordon1985, abstract = {Despite the almost complete lack of research addressing a theoretical understanding of empathy or ways to increase human empathy, empathy is a central component of effective human communication. Seen as a key social science phenomenon, it is viewed, along with power, as an inextricable component of human dynamics, and, in its relationship with altruism, possibly plays a causal role. A problem with research on empathy has been a lack of conceptual clarity. Three ways to improve empathetic listening are to avoid judgment, give the speaker time to speak without interruption, and focus on the speaker. Many of the helping professions have attempted training programs aimed at increasing the empathetic communication skills of practitioners in these fields. 
However, being told to listen empathetically is not the same as being taught to listen with empathy; and in critique of the empathy skills programs that are conducted within the helping professions, a significantly raised test score does not mean that empathy has been attained. Although empathetic communication is a complex subject matter, skills associated with empathy and active listening have been perceived as being more important than skills associated with critical or deliberative listening.}, address = {Baguio, Philippines}, author = {Gordon, Ronald D.}, booktitle = {International Conference of the World Communication Association}, file = {::}, keywords = {Communication (thought transfer),empathhy,interpersonal communication,listening,listening habits,listening skills,speech communication}, pages = {1--16}, title = {{Empathy: The State of the Art and Science}}, year = {1985} } @article{Pelachaud2009, abstract = {Over the past few years we have been developing an expressive embodied conversational agent system. In particular, we have developed a model of multimodal behaviours that includes dynamism and complex facial expressions. The first feature refers to the qualitative execution of behaviours. Our model is based on perceptual studies and encompasses several parameters that modulate multimodal behaviours. The second feature, the model of complex expressions, follows a componential approach where a new expression is obtained by combining facial areas of other expressions. Lately we have been working on adding temporal dynamism to expressions. So far they have been designed statically, typically at their apex. Only full-blown expressions could be modelled. To overcome this limitation, we have defined a representation scheme that describes the temporal evolution of the expression of an emotion. 
It is no longer represented by a static definition but by a temporally ordered sequence of multimodal signals.}, author = {Pelachaud, Catherine}, doi = {10.1098/rstb.2009.0186}, file = {::}, issn = {1471-2970}, journal = {Philosophical transactions of the Royal Society of London. Series B, Biological sciences}, keywords = {Computer Simulation,Emotions,Emotions: physiology,Facial Expression,Humans,Models, Psychological,Social Behavior}, month = dec, number = {1535}, pages = {3539--3548}, pmid = {19884148}, title = {{Modelling multimodal expression of emotion in a virtual agent.}}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2781894\&tool=pmcentrez\&rendertype=abstract}, volume = {364}, year = {2009} } @incollection{Preston2007, author = {Preston, SD}, booktitle = {Empathy in mental illness}, chapter = {23}, editor = {Farrow, T. and Woodruff, P.}, file = {::}, isbn = {0521847346}, pages = {428--446}, publisher = {Cambridge University Press}, title = {{A perception-action model for empathy}}, url = {http://www-personal.umich.edu/~prestos/Downloads/Preston2007\_MI.pdf}, year = {2007} } @inproceedings{Denef2009, abstract = {This thesis investigates the design of human computer interaction techniques for ubiquitous computing solutions in firefighting.}, address = {Uppsala, Sweden}, author = {Denef, Sebastian}, booktitle = {INTERACT '09 Proceedings of the 12th IFIP TC 13 International Conference on Human-Computer Interaction: Part II}, doi = {10.1007/978-3-642-03658-3\_97}, editor = {Gross, Tom and Gulliksen, Jan and Kotz\'{e}, Paula and Oestreicher, Lars and Palanque, Philippe and Prates, Raquel Oliveira and Winckler, Marco}, pages = {864--867}, publisher = {Springer Berlin / Heidelberg}, title = {{Human-Computer Interaction Techniques in Firefighting}}, url = {http://www.springerlink.com/index/n0688783567n3251.pdf http://dl.acm.org/citation.cfm?id=1616339}, year = {2009} } @inproceedings{Vinciarelli2008, address = {Chania, Crete, Greece}, author = 
{Vinciarelli, Alessandro and Pantic, Maja and Bourlard, Herv\'{e} and Pentland, Alex}, booktitle = {Proceedings of the 10th international conference on Multimodal interfaces - IMCI '08}, doi = {10.1145/1452392.1452405}, isbn = {9781605581989}, keywords = {computer vision,definitely a key ability,difference between,life being involved in,social behaviour anal-,social intelligence is,social interactions,social signal processing,speech analysis,that can make the,ysis}, pages = {61}, publisher = {ACM Press}, title = {{Social signals, their function, and automatic analysis}}, url = {http://portal.acm.org/citation.cfm?doid=1452392.1452405}, year = {2008} } @article{Boukricha2011, abstract = {Allowing virtual humans to align to others’ perceived emotions is believed to enhance their cooperative and communicative social skills. In our work, emotional alignment is realized by endowing a virtual human with the ability to empathize. Recent research shows that humans empathize with each other to different degrees depending on several factors including, among others, their mood, their personality, and their social relationships. Although providing virtual humans with features like affect, personality, and the ability to build social relationships, little attention has been devoted to the role of such features as factors modulating their empathic behavior. Supported by psychological models of empathy, we propose an approach to model empathy for the virtual human EMMA—an Empathic MultiModal Agent—consisting of three processing steps: First, the Empathy Mechanism by which an empathic emotion is produced. Second, the Empathy Modulation by which the empathic emotion is modulated. Third, the Expression of Empathy by which EMMA’s multiple modalities are triggered through the modulated empathic emotion. 
The proposed model of empathy is illustrated in a conversational agent scenario involving the virtual humans MAX and EMMA.}, author = {Boukricha, Hana and Wachsmuth, Ipke}, doi = {10.1007/s13218-011-0109-8}, file = {::}, issn = {0933-1875}, journal = {KI - K\"{u}nstliche Intelligenz}, keywords = {agent-agent interaction,empathic virtual humans,human-agent,internal simulation}, month = may, number = {3}, pages = {195--204}, title = {{Empathy-Based Emotional Alignment for a Virtual Human: A Three-Step Approach}}, url = {http://www.springerlink.com/content/9322738p4101p94w/}, volume = {25}, year = {2011} } @inproceedings{Polajnar2011, author = {Polajnar, Jernej and Dalvandi, B. and Polajnar, D.}, booktitle = {Cognitive Informatics \& Cognitive Computing (ICCI'CC'11), 2011 10th IEEE International Conference on}, file = {::}, isbn = {9781457716973}, pages = {96--102}, publisher = {IEEE}, title = {{Does empathy between artificial agents improve agent teamwork?}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6016126}, year = {2011} } @book{Ekman2002, address = {Salt Lake City, UT}, author = {Ekman, Paul and Freisen, Wallace V. and Hager, Joseph C}, booktitle = {A Human Face}, edition = {2nd}, institution = {Consulting Psychologists}, isbn = {0931835011}, number = {4}, pages = {4--5}, publisher = {Research Nexus eBook}, title = {{Facial Action Coding System}}, volume = {160}, year = {2002} } @article{Lee2009, author = {Lee, Jina and Prendinger, Helmut}, file = {::}, isbn = {9781424447992}, journal = {Affective Computing}, title = {{Learning models of speaker head nods with affective information}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5349543}, year = {2009} } @article{Mandryk2006, abstract = {Emerging technologies offer exciting new ways of using entertainment technology to create fantastic play experiences and foster interactions between players. 
Evaluating entertainment technology is challenging because success isn’t defined in terms of productivity and performance, but in terms of enjoyment and interaction. Current subjective methods of evaluating entertainment technology aren’t sufficiently robust. This paper describes two experiments designed to test the efficacy of physiological measures as evaluators of user experience with entertainment technologies. We found evidence that there is a different physiological response in the body when playing against a computer versus playing against a friend. These physiological results are mirrored in the subjective reports provided by the participants. In addition, we provide guidelines for collecting physiological data for user experience analysis, which were informed by our empirical investigations. This research provides an initial step towards using physiological responses to objectively evaluate a user’s experience with entertainment technology.}, author = {Mandryk, Regan L. and Inkpen, Kori M. and Calvert, Thomas W.}, doi = {10.1080/01449290500331156}, journal = {Behaviour \& Information Technology}, number = {2}, pages = {141--158}, title = {{Using psychophysiological techniques to measure user experience with entertainment technologies}}, url = {http://www.tandfonline.com/doi/abs/10.1080/01449290500331156}, volume = {25}, year = {2006} } @article{Peters2005, abstract = {One of the major problems of user's interaction with Embodied Conversational Agents (ECAs) is to have the conversation last more than few second: after being amused and intrigued by the ECAs, users may find rapidly the restrictions and limitations of the dialog systems, they may perceive the repetition of the ECAs animation, they may find the behaviors of ECAs to be inconsistent and implausible, etc. We believe that some special links, or bonds, have to be established between users and ECAs during interaction. 
It is our view that showing and/or perceiving interest is the necessary premise to establish a relationship. In this paper we present a model of an ECA able to establish, maintain and end the conversation based on its perception of the level of interest of its interlocutor.}, author = {Peters, Christopher and Pelachaud, Catherine and Bevacqua, Elisabetta and Mancini, Maurizio and Poggi, Isabella}, doi = {10.1007/11550617\_20}, editor = {Panayiotopoulos, Themis and Gratch, Jonathan and Aylett, Ruth and Ballin, Daniel and Olivier, Patrick and Rist, Thomas}, isbn = {9783540287384}, journal = {Intelligent Virtual Agents}, pages = {229--240}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, title = {{A model of attention and interest using gaze behavior}}, url = {http://www.springerlink.com/index/8gqh2c9phmhb12jd.pdf}, volume = {3661}, year = {2005} } @book{Fussell2002, author = {Fussell, Susan R.}, editor = {Fussell, Susan R.}, file = {::}, isbn = {9780805836905}, pages = {294}, publisher = {Lawrence Erlbaum Associates}, title = {{The verbal communication of emotions: Interdisciplinary perspectives}}, url = {http://books.google.com/books?id=MHea6DYYfEgC}, year = {2002} } @article{Blair2005, abstract = {Empathy is a lay term that is becoming increasingly viewed as a unitary function within the field of cognitive neuroscience. In this paper, a selective review of the empathy literature is provided. It is argued from this literature that empathy is not a unitary system but rather a loose collection of partially dissociable neurocognitive systems. In particular, three main divisions can be made: cognitive empathy (or Theory of Mind), motor empathy, and emotional empathy. The two main psychiatric disorders associated with empathic dysfunction are considered: autism and psychopathy. It is argued that individuals with autism show difficulties with cognitive and motor empathy but less clear difficulties with respect to emotional empathy. 
In contrast, individuals with psychopathy show clear difficulties with a specific form of emotional empathy but no indications of impairment with cognitive and motor empathy.}, author = {Blair, R J R}, doi = {10.1016/j.concog.2005.06.004}, issn = {1053-8100}, journal = {Consciousness and cognition}, keywords = {Affect,Affect: physiology,Autistic Disorder,Autistic Disorder: physiopathology,Brain,Brain: physiopathology,Cognition,Cognition: physiology,Empathy,Humans,Mental Disorders,Mental Disorders: physiopathology,Social Perception}, month = dec, number = {4}, pages = {698--718}, pmid = {16157488}, title = {{Responding to the emotions of others: dissociating forms of empathy through the study of typical and psychiatric populations}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16157488}, volume = {14}, year = {2005} } @article{Ekman1993, abstract = {Cross-cultural research on facial expression and the developments of methods to measure facial expression are briefly summarized. What has been learned about emotion from this work on the face is then elucidated. Four questions about facial expression and emotion are discussed: What information does an expression typically convey? Can there be emotion without facial expression? Can there be a facial expression of emotion without emotion? How do individuals differ in their facial expressions of emotion?}, author = {Ekman, Paul}, institution = {Human Interaction Laboratory, University of California, San Francisco 94143.}, journal = {American Psychologist}, number = {4}, pages = {384--392}, pmid = {8512154}, publisher = {American Psychological Association}, title = {{Facial expression and emotion.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8512154}, volume = {48}, year = {1993} } @article{Johnstone2000, abstract = {This chapter provides a comprehensive overview of the current state of the literature on the vocal communication of emotion. 
It highlights some of the many evolutionary, physiological, cognitive, social, and cultural factors which shape the way humans express and perceive emotions in speech. With such a large and seemingly disparate number of determinants, it might seem as if the topic were too messy to expect any invariance in empirical findings. Perhaps surprisingly however, the summary of research into the production and perception of emotional speech has revealed considerable consistency. On the production side, the evidence is starting to accumulate that humans consistently modify their speech in specific ways to express different emotions. The major acoustic parameters are described and the relevant literature reviewed. Results of perception studies indicate that emotions expressed in speech are to a large extent successfully detected by a variety of populations, on the basis of an experimentally identifiable set of acoustic parameters. The differences in recognition accuracy between different emotions are discussed. The consistency in the results is no doubt partly because most research to date has been limited to settings in which many of the factors described above have been eliminated or controlled for. In addition to further refinement of analysis techniques and a focus on real, as well as acted, emotional speech, there is clearly a need for studies that better quantify the relative contribution of culture, language and social strategy to the vocal comunication of emotion. 
To address these issues in a manner that allows results from different studies to be integrated and compared, a coordinated, interdisciplinary approach to research on the vocal communication of emotion will be required.}, author = {Johnstone, Tom and Scherer, Klaus R.}, chapter = {14}, editor = {Lewis, M and Haviland-Jones, J M}, journal = {Handbook of emotions}, number = {1-2}, pages = {220--235}, publisher = {The Guilford Press}, title = {{Vocal communication of emotion}}, url = {http://centaur.reading.ac.uk/4362/}, volume = {2}, year = {2000} } @article{Hatfield1994, abstract = {(From the introduction) The focus in this text is on rudimentary or primitive emotional contagion-that which is relatively automatic, unintentional, uncontrollable, and largely inaccessible to conversant awareness. This is defined as the tendency to automatically mimic and synchronize facial expressions, vocalizations, postures, and movements with those of another person, and consequently, to converge emotionally. Emotional contagion may well be important in personal relationships: It fosters behavioral synchrony and the tracking of the feelings of others moment to moment, even when individuals are not explicitly attending to this information. 
(PsycINFO Database Record (c) 2008 APA)}, author = {Hatfield, Elaine and Cacioppo, John T and Rapson, Richard L}, doi = {10.1111/1467-8721.ep10770953}, editor = {Craighead, W E and Nemeroff, C B}, isbn = {0521449480}, issn = {09637214}, journal = {Current Directions in Psychological Science}, number = {3}, pages = {96--99}, pmid = {20544488}, publisher = {Cambridge University Press}, title = {{Emotional contagion}}, url = {http://dx.doi.org/10.1111/j.1467-8721.1993.tb00114.x}, volume = {2}, year = {1994} } @article{Sabourin, author = {Sabourin, Jennifer and Mott, Bradford and Lester, James}, file = {::}, journal = {lorentzcenter.nl}, keywords = {empathetic virtual agents,pedagogical agents,virtual learning}, title = {{Computational Models of Affect and Empathy for Pedagogical Virtual Agents}}, url = {http://www.lorentzcenter.nl/lc/web/2011/464/presentations/Sabourin.pdf} } Duplicate entry with key Boukricha2011 removed here: it repeated the definition given earlier in this file, and BibTeX citation keys must be unique (a repeated key triggers a repeated-entry error and Biber rejects it).
@article{Happ2011, author = {Happ, Christian and Melzer, Andr\'{e}}, file = {::}, journal = {Ifip International Federation For Information Processing}, keywords = {1,1 prosocial and antisocial,aggression,anderson and his colleagues,confirmed that video game,effects of video games,empathy,furthermore,in a recent overview,prosocial behavior,related to indicators of,video games,violence exposure is positively}, pages = {371--374}, title = {{Bringing Empathy into Play: On the Effects of Empathy in Violent and Nonviolent Video Games}}, url = {http://www.springerlink.com/index/P76556V1HN316RK6.pdf}, year = {2011} } @article{Pereira2011, author = {Pereira, A. and Leite, Iolanda and Mascarenhas, Samuel and Martinho, Carlos and Paiva, Ana}, file = {::}, journal = {Human-Robot Personal Relationships}, keywords = {companionship,empathy,human-robot interaction}, pages = {130--138}, publisher = {Springer}, title = {{Using empathy to improve human-robot relationships}}, url = {http://www.springerlink.com/index/R468X62581620V62.pdf}, year = {2011} } @inproceedings{Liu2010a, abstract = {Emotions accompany everyone in the daily life, playing a key role in non-verbal communication, and they are essential to the understanding of human behavior. 
Emotion recognition could be done from the text, speech, facial expression or gesture. In this paper, we concentrate on recognition of “inner” emotions from electroencephalogram (EEG) signals as humans could control their facial expressions or vocal intonation. The need and importance of the automatic emotion recognition from EEG signals has grown with increasing role of brain computer interface applications and development of new forms of human-centric and human-driven interaction with digital media. We propose fractal dimension based algorithm of quantification of basic emotions and describe its implementation as a feedback in 3D virtual environments. The user emotions are recognized and visualized in real time on his/her avatar adding one more so-called “emotion dimension” to human computer interfaces.}, address = {Singapore}, author = {Liu, Yisi and Sourina, Olga and Nguyen, Minh Khoa}, booktitle = {International Conference on Cyberworlds (CW)}, doi = {10.1109/CW.2010.37}, isbn = {978-1-4244-8301-3}, keywords = {- emotion recognition,bci,eeg,emotion visualization,fractal dimension,hci}, month = oct, pages = {262--269}, publisher = {IEEE Computer Society}, title = {{Real-Time EEG-Based Human Emotion Recognition and Visualization}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5656346}, year = {2010} } @article{Poggi2000, abstract = {Our goal is to create an intelligent 3D agent able to send complex, natural messages to users and, in the future, to converse with them. We look at the relationship between the agents communicative intentions and the way that these intentions are expressed into verbal and nonverbal messages. In this paper, we concentrate on the study and generation of coordinated linguistic and gaze communicative acts. In this view we analyse gaze signals according to their functional meaning rather than to their physical actions. 
We propose a formalism where a communicative act is represented by two elements: a meaning (that corresponds to a set of goals and beliefs that the agent has the purpose to transmit to the interlocutor) and a signal, that is the nonverbal expression of that meaning. We also outline a methodology to generate messages that coordinate verbal with nonverbal signals.}, author = {Poggi, Isabella and Pelachaud, Catherine and {De Rosis}, Fiorella}, issn = {09217126}, journal = {Ai Communications}, number = {3}, pages = {169--181}, publisher = {IOS Press}, title = {{Eye communication in a conversational 3D synthetic agent}}, url = {http://portal.acm.org/citation.cfm?id=1216435.1216439}, volume = {13}, year = {2000} } @inproceedings{Wright2008, author = {Wright, Peter and McCarthy, J.}, booktitle = {Proceeding of the twenty-sixth annual SIGCHI conference on Human factors in computing systems}, isbn = {9781605580111}, pages = {637--646}, publisher = {ACM}, title = {{Empathy and experience in HCI}}, url = {http://dl.acm.org/citation.cfm?id=1357156}, year = {2008} } @inproceedings{Gama2011, abstract = {Over the last decade extensive research has been conducted in the area of conversational agents focusing in many different aspects of these agents. In this research, and aiming at building agents that maintain a social connection with users, empathy has been one of those areas, as it plays a leading role in the establishment of social relationships. In this paper we present a relationship model of empathy that takes advantage of Social Penetration Theory's concepts for relationship building. This model has been implemented into an agent that attempts to establish a relationship with the user, expressing empathy both verbally and visually. The visual expression of empathy consists of facial expression and physical proximity representation. 
The user tests performed showed that while users were able to develop a simple relationship with the agents, they however developed stronger relationships with a version of the agent that is most visually expressive and takes advantage of the proximity element, confirming the significance of our model based on social penetration theory may have and, consequently, the importance of the visual representation of empathic responses.}, address = {Memphis, TN, USA}, author = {Gama, Sandra and Barata, Gabriel and Gon\c{c}alves, D. and Prada, R. and Paiva, Ana}, booktitle = {ACII'11 Proceedings of the 4th international conference on Affective computing and intelligent interaction - Volume Part I}, doi = {10.1007/978-3-642-24600-5\_54}, editor = {D'Mello, Sidney K. and Graesser, Arthur C. and Schuller, Bj\"{o}rn and Martin, Jean-Claude}, file = {::}, keywords = {affective computing,conversational agent,empathic agent}, pages = {507--516}, publisher = {Springer Berlin / Heidelberg}, title = {{SARA: social affective relational agent: a study on the role of empathy in artificial social agents}}, url = {http://www.springerlink.com/content/g0433kx744258w62/}, year = {2011} } @article{Cogger1982, author = {Cogger, J W}, journal = {The Personnel journal}, number = {11}, pages = {840--843}, pmid = {10258019}, title = {{Are you a skilled interviewer?}}, volume = {61}, year = {1982} } @book{Mowrer1960, address = {New York}, author = {Mowrer, Orval Hobart}, pages = {555}, publisher = {Wiley}, title = {{Learning theory and behavior}}, year = {1960} } @inproceedings{Zanbaka2007, abstract = {Do human-human social interactions carry over to human- virtual human social interactions? How does this affect future interface designers? We replicated classical tests of social influence known as the social facilitation and inhibition effects. Social facilitation/inhibition theory states that when in the presence of others, people perform simple tasks better and complex tasks worse. 
Participants were randomly assigned to perform both simple and complex tasks alone and in the presence of either a real human, a projected virtual human, or a virtual human in a head- mounted display. Our results showed participants were inhibited by the presence of others, whether real or virtual. That is, participants performed worse on the complex task, both in terms of percent correct and reaction times, when in the presence of others than when alone. Social facilitation did not occur with the real or virtual human. We discuss these results and their implications for future interface designers.}, address = {San Jose, California, USA}, author = {Zanbaka, Catherine and Ulinski, Amy and Goolkasian, Paula and Hodges, Larry F}, booktitle = {CHI 2007 Proceedings of Social Influence}, isbn = {9781595935939}, keywords = {Virtual humans,avatars,experimental studies,human-computer interaction,interface agents,social facilitation and inhibition,social influence,social psychology.}, pages = {1561--1570}, publisher = {ACM}, title = {{Social Responses to Virtual Humans : Implications for Future Interface Design}}, year = {2007} } @article{Stockwell1994, abstract = {The concept of the Alcohol Dependence Syndrome has been influential in the field of alcohol studies in the 1980s. The Severity of Alcohol Dependence Questionnaire (SADQ) is one of a generation of alcohol problem scales developed to measure degree of dependence rather than presence or absence of 'alcoholism'. This paper describes the development of a form of the SADQ for community samples of drinkers (SADQ-C) and its relationship to a brief scale designed to measure impaired control over drinking. In a sample of 52 problem drinkers, SADQ and SADQ-C correlated almost perfectly (r = 0.98). In a larger sample of 197 attenders at a controlled drinking clinic, Principal Components Analysis revealed one major factor accounting for 71.7\% of the total variance. 
High internal reliability was indicated with a Cronbach's Alpha of 0.98. Application of this instrument in a random survey of Western Australian households is then described. It was necessary to remove items relating to 'reinstatement of dependence' for this sample. A single major factor was identified by principal components analysis, accounting for 69.1\% of the total variance. In both the clinic and the community samples SADQ-C scores correlated highly with Impairment of Control scores. The findings are interpreted as supporting the view that there is a single dimension of alcohol dependence upon which all persons who drink alcohol with any regularity may be located.}, author = {Stockwell, T and Sitharthan, T and McGrath, D and Lang, E}, institution = {National Centre for Research into the Prevention of Drug Abuse, Curtin University of Technology, Perth, Western Australia.}, journal = {Addiction Abingdon England}, keywords = {adolescent,adult,aged,alcohol drinking,alcohol drinking adverse effects,alcohol drinking epidemiology,alcohol drinking psychology,alcoholism,alcoholism classification,alcoholism diagnosis,alcoholism epidemiology,alcoholism psychology,cross sectional studies,female,humans,incidence,internal external control,male,middle aged,psychometrics,reproducibility results,substance withdrawal syndrome,substance withdrawal syndrome classification,substance withdrawal syndrome diagnosis,substance withdrawal syndrome epidemiology,substance withdrawal syndrome psychology,western australia,western australia epidemiology}, number = {2}, pages = {167--174}, pmid = {8173482}, title = {{The measurement of alcohol dependence and impaired control in community samples.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8173482}, volume = {89}, year = {1994} } @article{Eisenberg1983, abstract = {Reviews the literature on sex differences in empathy (defined as vicarious affective responding to the emotional state of another) and related capacities (affective role taking 
and decoding of nonverbal cues). The literature is discussed according to method used to assess empathy and affective role taking. Where appropriate, meta-analyses were also computed. In general, sex differences in empathy were found to be a function of the methods used to assess empathy. There was a large sex difference favoring women when the measure of empathy was self-report scales; moderate differences (favoring females) were found for reflexive crying and self-report measures in laboratory situations; and no sex differences were evident when the measure of empathy was either physiological or unobtrusive observations of nonverbal reactions to another's emotional state. Moreover, few sex differences were found for children's affective role taking and decoding abilities. (156 ref) (PsycINFO Database Record (c) 2006 APA, all rights reserved), (C) 1983 by the American Psychological Association}, author = {Eisenberg, Nancy and Lennon, Randy}, issn = {19391455}, journal = {Psychological Bulletin}, number = {1}, pages = {100--131}, title = {{Sex Differences in Empathy and Related Capacities}}, volume = {94}, year = {1983} } @phdthesis{Becker-Asano2008, author = {Becker-Asano, Christian}, file = {::}, keywords = {Emotion,Empathy,PhD Thesis,Secondary Emotions,primary Emotions}, mendeley-tags = {PhD Thesis}, pages = {186}, publisher = {IOS Press}, school = {University of Bielefeld}, title = {{WASABI: Affect simulation for agents with believable interactivity}}, type = {PhD Dissertation, IOS Press (DISKI 319)}, url = {http://books.google.com/books?hl=en\&lr=\&id=8ABvlwHBCQIC\&oi=fnd\&pg=PA1\&dq=WASABI+:+Affect+Simulation+for+Agents+with+Believable+Interactivity\&ots=m6MhCZ6IzD\&sig=IcDYrCYofbGlJ8E1szs\_wltd18k}, volume = {319}, year = {2008} } @inproceedings{Heerink2009, author = {Heerink, Marcel and Krose, B. and Evers, Vanessa and Wielinga, Bob}, booktitle = {Robot and Human Interactive Communication, 2009. RO-MAN 2009. 
The 18th IEEE International Symposium on}, file = {::}, pages = {528--533}, publisher = {IEEE}, title = {{Measuring acceptance of an assistive social robot: a suggested toolkit}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5326320}, year = {2009} } @inproceedings{Kashyap2012, abstract = {Earlier works on personalized Web search focused on the click- through graphs, while recent works leverage social annotations, which are often unavailable. On the other hand, many users are members of the social networks and subscribe to social groups. Intuitively, users in the same group may have similar relevance judgments for queries related to these groups. SonetRank utilizes this observation to personalize the Web search results based on the aggregate relevance feedback of the users in similar groups. SonetRank builds and maintains a rich graph-based model, termed Social Aware Search Graph, consisting of groups, users, queries and results click-through information. SonetRank’s personalization scheme learns in a principled way to leverage the following three signals, of decreasing strength: the personal document preferences of the user, of the users of her social groups relevant to the query, and of the other users in the network. SonetRank also uses a novel approach to measure the amount of personalization with respect to a user and a query, based on the query-specific richness of the user’s social profile. 
We evaluate SonetRank with users on Amazon Mechanical Turk and show a significant improvement in ranking compared to state-of-the-art techniques.}, address = {Maui, HI, USA}, author = {Kashyap, Abhijith and Amini, Reza and Hristidis, Vagelis}, booktitle = {ACM 21st Conference on Information and Knowledge Management CIKM 2012}, file = {::}, isbn = {9781450311564}, keywords = {Results Re-ranking.,Search Personalization,Social Search}, publisher = {ACM}, title = {{SonetRank : Leveraging Social Networks to Personalize Search}}, year = {2012} } @article{Fellner2012, abstract = {Individuals may differ in their ability to learn the significance of emotional cues within a specific context. If so, trait emotional intelligence (EI) may be associated with faster cue learning. This study (N = 180) tested whether trait EI predicts faster learning of a critical cue for discriminating ‘‘terrorists’’ from ‘‘non-terrorists’’, using virtual-reality heads as stimuli. The critical cue was either facial emotion (positive or negative), or a neutral feature (hat size). Cognitive ability and subjective state were also assessed. Par- ticipants were faster to learn with an emotive cue. Surprisingly, high trait EI was correlated with poorer performance, especially early in learning. Subjective distress was also associated with impaired learning to emotive cues. }, author = {Fellner, Angela N. and Matthews, Gerald and Shockley, Kevin D. and Warm, Joel S. 
and Zeidner, Moshe and Karlov, Lisa and Roberts, Richard D.}, doi = {10.1016/j.jrp.2012.01.004}, file = {::}, issn = {00926566}, journal = {Journal of Research in Personality}, keywords = {trait emotional intelligence}, month = jun, number = {3}, pages = {239--247}, publisher = {Elsevier Inc.}, title = {{Using emotional cues in a discrimination learning task: Effects of trait emotional intelligence and affective state}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0092656612000050}, volume = {46}, year = {2012} } @book{Fridlund1994, address = {San Diego}, author = {Fridlund, A.J.}, publisher = {Academic Press}, title = {{Human Facial Expression: An Evolutionary View}}, year = {1994} } @article{Barrett2007, abstract = {Experiences of emotion are content-rich events that emerge at the level of psychological description, but must be causally constituted by neurobiological processes. This chapter outlines an emerging scientific agenda for understanding what these experiences feel like and how they arise. We review the available answers to what is felt (i.e., the content that makes up an experience of emotion) and how neurobiological processes instantiate these properties of experience. These answers are then integrated into a broad framework that describes, in psychological terms, how the experience of emotion emerges from more basic processes. We then discuss the role of such experiences in the economy of the mind and behavior.}, author = {Barrett, Lisa Feldman and Mesquita, Batja and Ochsner, Kevin N and Gross, James J}, doi = {10.1146/annurev.psych.58.110405.085709}, editor = {Meyers, Editor-in-Chief Robert A}, institution = {Department of Psychology, Boston College, Chestnut Hill, Massachusetts 02467, USA. 
barretli$\backslash$@bc.edu}, isbn = {9780122274107}, issn = {00664308}, journal = {Annual Review of Psychology}, keywords = {affect,consciousness,emotion}, number = {1}, pages = {373--403}, pmid = {17002554}, publisher = {Annual Reviews}, title = {{The Experience of Emotion}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17002554}, volume = {58}, year = {2007} } @article{Brave2005, abstract = {Embodied computer agents are becoming an increasingly popular human-computer interaction technique. Often, these agents are programmed with the capacity for emotional expression. This paper investigates the psychological effects of emotion in agents upon users. In particular, two types of emotion were evaluated: self-oriented emotion and other-oriented, empathic emotion. In a 2 (self-oriented emotion: absent vs. present) by 2 (empathic emotion: absent vs. present) by 2 (gender dyad: male vs. female) between-subjects experiment (N = 96), empathic emotion was found to lead to more positive ratings of the agent by users, including greater likeability and trustworthiness, as well as greater perceived caring and felt support. No such effect was found for the presence of self-oriented emotion. 
Implications for the design of embodied computer agents are discussed and directions for future research suggested.}, author = {Brave, Scott and Nass, Clifford and Hutchinson, Kevin}, doi = {10.1016/j.ijhcs.2004.11.002}, file = {::}, issn = {10715819}, journal = {International Journal of Human-Computer Studies - Special issue: Subtle expressivity for characters and robots}, keywords = {affective computing,characters,embodied agents,emotion,empathy,social interfaces}, month = feb, number = {2}, pages = {161--178}, title = {{Computers that care: investigating the effects of orientation of emotion exhibited by an embodied computer agent}}, url = {http://www.sciencedirect.com/science/article/pii/S1071581904001284}, volume = {62}, year = {2005} } @article{Lafrance1976, author = {Lafrance, Marianne and Broadbent, M.}, doi = {10.1177/105960117600100307}, isbn = {1059601176}, issn = {1059-6011}, journal = {Group \& Organization Management}, month = sep, number = {3}, pages = {328--333}, title = {{Group Rapport: Posture Sharing as a Nonverbal Indicator}}, volume = {1}, year = {1976} } @inproceedings{Breitfuss2007, address = {New York, New York, USA}, author = {Breitfuss, Werner and Prendinger, Helmut and Ishizuka, Mitsuru}, booktitle = {Proceedings of the ninth international conference on Multimodal interfaces - ICMI '07}, doi = {10.1145/1322192.1322247}, file = {::}, isbn = {9781595938176}, keywords = {interfaces,multi-modal presentation,multimodal input and output}, pages = {319--322}, publisher = {ACM Press}, title = {{Automated generation of non-verbal behavior for virtual embodied characters}}, url = {http://portal.acm.org/citation.cfm?doid=1322192.1322247}, year = {2007} } @article{Sloan2009, author = {Sloan, Robin James Stuart and Cook, Malcolm and Robinson, Brian}, doi = {10.1109/VIZ.2009.28}, file = {::}, isbn = {978-0-7695-3734-4}, journal = {2009 Second International Conference in Visualisation}, keywords = {- character animation,believability,emotional 
expression,facial animation,inform artistic practice,of and between emotional,perception,produce and test animations,the primary goal of,the project is to}, month = jul, pages = {61--66}, publisher = {Ieee}, title = {{Considerations for Believable Emotional Facial Expression Animation}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5230711}, year = {2009} } @article{VanSwol2003, author = {{Van Swol}, Lyn M.}, doi = {10.1177/0093650203253318}, file = {::}, issn = {00000000}, journal = {Communication Research}, month = aug, number = {4}, pages = {461--480}, title = {{The Effects of Nonverbal Mirroring on Perceived Persuasiveness, Agreement with an Imitator, and Reciprocity in a Group Discussion}}, volume = {30}, year = {2003} } @article{Denef2009, author = {Denef, Sebastian}, file = {::}, journal = {Human-Computer Interaction–INTERACT 2009}, pages = {864--867}, publisher = {Springer}, title = {{Human-Computer Interaction Techniques in Firefighting}}, url = {http://www.springerlink.com/index/n0688783567n3251.pdf}, year = {2009} } @inproceedings{Hegel2006, author = {Hegel, Frank and Spexard, Torsten and Wrede, Britta and Horstmann, G. and Vogt, T.}, booktitle = {Humanoid Robots, 2006 6th IEEE-RAS International Conference on}, file = {::}, isbn = {142440200X}, pages = {56--61}, publisher = {IEEE}, title = {{Playing a different imitation game: Interaction with an Empathic Android Robot}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4115580}, year = {2006} } @article{Cassell2001a, abstract = {Prior research into embodied interface agents has found that users like them and find them engaging. However, results on the effectiveness of these interfaces for task completion have been mixed. In this paper, we argue that embodiment can serve an even stronger function if system designers use actual human conversational protocols in the design of the interface. 
Communicative behaviors such as salutations and farewells, conversational turn-taking with interruptions, and describing objects using hand gestures are examples of protocols that all native speakers of a language already know how to perform and can thus be leveraged in an intelligent interface. We discuss how these protocols are integrated into Rea, an embodied, multi-modal interface agent who acts as a real-estate salesperson, and we show why embodiment is required for their successful implementation.}, author = {Cassell, Justine}, doi = {10.1016/S0950-7051(00)00102-7}, issn = {09507051}, journal = {Knowledge-Based Systems}, keywords = {communicative behavior,embodied conversational agent,embodied interface agent,rea}, number = {1-2}, pages = {55--64}, publisher = {Elsevier}, title = {{More than just a pretty face: conversational protocols and the affordances of embodiment}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0950705100001027}, volume = {14}, year = {2001} } @inproceedings{Knoppel2008, abstract = {DEIRA is a virtual agent commenting on virtual horse races in real time. DEIRA analyses the state of the race, acts emotionally and comments about the situation in a believable and engaging way, using synthesized speech and facial expressions. 
In this paper we discuss the challenges, explain the computational models for the cognitive, emotional and communicative behavior, and account on implementation and feedback from users.}, address = {Estoril, Portugal}, author = {Knoppel, Fran\c{c}ois L A and Tigelaar, Almer S and Bos, Danny Oude and Alofs, Thijs and Ruttkay, Zs\'{o}fia}, booktitle = {7th international joint conference on Autonomous agents and multiagent systems}, editor = {Padgham and Parkes and M\"{u}ller and Parsons}, file = {::}, keywords = {emotion,facial expressions,intelligent virtual agent,modeling,multimodal communication,synthetic speech}, pages = {112--119}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{Trackside DEIRA : A Dynamic Engaging Intelligent Reporter Agent}}, url = {http://dl.acm.org/citation.cfm?id=1402404}, year = {2008} } @article{Sayette2001, abstract = {The Facial Action Coding System (FACS) (Ekman \& Friesen, 1978) is a comprehensive and widely used method of objectively describing facial activity. Little is known, however, about inter-observer reliability in coding the occurrence, intensity, and timing of individual FACS action units. The present study evaluated the reliability of these measures. Observational data came from three independent laboratory studies designed to elicit a wide range of spontaneous expressions of emotion. Emotion challenges included olfactory stimulation, social stress, and cues related to nicotine craving. Facial behavior was video-recorded and independently scored by two FACS-certified coders. 
Overall, we found good to excellent reliability for the occurrence, intensity, and timing of individual action units and for corresponding measures of more global emotion-specified combinations.}, author = {Sayette, MA and Cohn, JF and Wertz, JM}, file = {::}, journal = {Journal of Nonverbal Behavior}, keywords = {FACS,facial expression,reliability}, number = {3}, pages = {167--185}, title = {{A psychometric evaluation of the facial action coding system for assessing spontaneous expression}}, url = {http://www.springerlink.com/index/h6g98m62j8r3up62.pdf}, volume = {25}, year = {2001} } @article{Lien1998, author = {Lien, James J and Cohn, Jeffrey F and Kanade, Takeo and Li, Ching-Chung}, file = {::}, journal = {IEEE Proceedings of FG'98}, title = {{Automated Facial Expression Recognition Based on FACS Action Units}}, year = {1998} } @inproceedings{Nguyen2009, abstract = {Experiencing emotional distress is the number one reason why people who are undergoing behaviour modification (e.g. quitting smoking, dieting) suffer from relapses. Providing emotional support is an effective way to help them overcome the unpleasant effects of negative affect and adhere to their regimen. 
Building computers with such ability has grabbed the attention of the HCI community in recent years. This paper presents the results of a 2 (modality: animated vs. no visual) by 3 (intervention: non-empathy vs. empathy vs. empathy and expressivity) between-subjects study that investigates the impact of two important factors and their interaction in the design of such systems: (1) different ways of expressing empathy, and (2) the modality of delivering such content.}, author = {Nguyen, H. and Masthoff, Judith}, booktitle = {Proceedings of the 4th International Conference on Persuasive Technology}, file = {::}, isbn = {9781605583761}, keywords = {affective computing,design,experimentation,human factors}, pages = {7}, publisher = {ACM}, title = {{Designing empathic computers: the effect of multimodal empathic feedback using animated agent}}, url = {http://portal.acm.org/citation.cfm?id=1541958}, year = {2009} } @article{Tartaro2008, abstract = {In this paper, we describe an intervention for children with social and communication deficits, such as autism, based on the use of a virtual peer that can engage in tightly collaborative narrative. We present a study in which children with autism engage in collaborative narrative with both a virtual and a human peer, and the use of contingent discourse is compared. Our findings suggest that contingent discourse increased over the course of interaction with a virtual peer, but not a human peer. Furthermore, topic management, such as introducing new topics or maintaining the current topic, was more likely to occur with the virtual peer than with the human peer. 
We discuss general implications of our work for understanding the role of peer interactions in learning.}, author = {Tartaro, Andrea and Cassell, Justine}, journal = {Analysis}, pages = {382--389}, publisher = {International Society of the Learning Sciences}, title = {{Playing with Virtual Peers : Bootstrapping Contingent Discourse in Children with Autism}}, url = {http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=pubmed\&cmd=Retrieve\&dopt=AbstractPlus\&list\_uids=5027430324945433305related:2TJmez4FxUUJ}, volume = {2}, year = {2008} } @inproceedings{Sourina2011, address = {New York, New York, USA}, author = {Sourina, Olga and Liu, Y. and Nguyen, Minh Khoa}, booktitle = {SIGGRAPH Asia 2011 Posters on - SA '11}, doi = {10.1145/2073304.2073315}, file = {::}, isbn = {9781450311373}, pages = {1}, publisher = {ACM Press}, title = {{Emotion-enabled EEG-based interaction}}, url = {http://dl.acm.org/citation.cfm?doid=2073304.2073315}, year = {2011} } @inproceedings{Paiva2004, address = {Washington, DC, USA}, author = {Paiva, Ana and Dias, J. and Sobral, Daniel and Aylett, Ruth}, booktitle = {AAMAS '04 Proceedings of the Third International Joint Conference on Autonomous Agents and Multiagent Systems}, doi = {10.1109/AAMAS.2004.82}, file = {::}, isbn = {1581138644}, pages = {194--201}, publisher = {IEEE Computer Society}, title = {{Caring for agents and agents that care: Building empathic relations with synthetic agents}}, url = {http://dl.acm.org/citation.cfm?id=1018754 http://dx.doi.org/10.1109/AAMAS.2004.82}, year = {2004} } @inproceedings{Huang2011, abstract = {Rapport, the feeling of being "in sync" with your conversational partners, is argued to underlie many desirable social effects. By generating proper verbal and nonverbal behaviors, virtual humans have been seen to create rapport during interactions with human users. 
In this paper, we introduce our approach to creating rapport following Tickle-Degnen and Rosenberg's threefactor (positivity, mutual attention and coordination) theory of rapport. By comparing with a previously published virtual agent, the Rapport Agent, we show that our virtual human predicts the timing of backchannel feedback and end-of-turn more precisely, performs more natural behaviors and, thereby creates much stronger feelings of rapport between users and virtual agents.}, address = {Reykjaavik, Iceland}, author = {Huang, Lixing and Morency, Louis-philippe and Gratch, Jonathan}, booktitle = {Proceedings of the 11th international conference on Intelligent virtual agents (IVA'11)}, file = {::}, keywords = {coordination,mutual attention,positivity,rapport,virtual human}, pages = {68--79}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{Virtual Rapport 2.0}}, year = {2011} } @inproceedings{Cramer2010, author = {Cramer, Henriette and Goddijn, Jorrit and Wielinga, Bob and Evers, Vanessa}, booktitle = {Proceeding of the 5th ACM/IEEE international conference on Human-robot interaction}, file = {::}, isbn = {9781424448937}, pages = {141--142}, publisher = {ACM}, title = {{Effects of (in) accurate empathy and situational valence on attitudes towards robots}}, url = {http://dl.acm.org/citation.cfm?id=1734513}, year = {2010} } @inproceedings{Jaques2004, abstract = {In this paper we describe the use of mental states, more specifically the BDI approach, to implement the process of affective diagnosis in an educational environment. We use the OCC model, which is based on the cognitive theory of emotions and is possible to be implemented computationally, in order to infer the learner’s emotions from his actions in the system interface. The BDI approach is very adequate since the emotions have a dynamic nature. 
Besides, in our work we profit from the reasoning capacity of the BDI approach in order to infer the student’s appraisal, which allow us to deduce student’s emotions.}, address = {Puebla}, author = {Jaques, Patricia Augustin and Viccari, Rosa M}, booktitle = {IBERO-AMERICAN CONFERENCE ON ARTIFICIAL INTELLIGENCE (IBERAMIA)}, file = {::}, pages = {901--911}, publisher = {Springer-Verlag}, title = {{A BDI Approach to Infer Student's Emotions}}, year = {2004} } @book{Greene2003, abstract = {Providing a thorough review and synthesis of work on communication skills and skill enhancement, this "Handbook" serves as a comprehensive and contemporary survey of theory and research on social interaction skills. Editors John O. Greene and Brant R. Burleson have brought together preeminent researchers and writers to contribute to this volume, establishing a foundation on which future study and research will build. The handbook chapters are organized into five major units: general theoretical and methodological issues (models of skill acquisition, methods of skill assessment); fundamental interaction skills (both transfunctional and transcontextual); function-focused skills (informing, persuading, supporting); skills used in management of diverse personal relationships (friendships, romances, marriages); and skills used in varied venues of public and professional life (managing leading, teaching). Distinctive features of this handbook include: broad, comprehensive treatment of work on social interaction skills and skill acquisition; up-to-date reviews of research in each area; and emphasis on empirically supported strategies for developing and enhancing specific skills. 
Researchers in communication studies, psychology, family studies, business management, and related areas will find this volume a comprehensive, authoritative source on communications skills and their enhancement, and it will be essential reading for scholars and students across the spectrum of disciplines studying social interaction.}, author = {Greene, John O and Burleson, Brant Raney}, booktitle = {Communication}, editor = {Greene, John O and Burleson, Brant R}, isbn = {0805834176}, publisher = {Lawrence Erlbaum Associates, Inc., Publishers}, title = {{Handbook of Communication and Social Interaction Skills}}, year = {2003} } @article{Grynberg2010, abstract = {Alexithymia and empathy have been related but very little is known on shared variance between their respective affective and cognitive dimensions. We examined this question with correlations, as well as both exploratory and confirmatory analyses, and controlled for anxiety and depression. The responses of 645 young adults to self-report questionnaires of alexithymia (TAS-20), empathy (IRI), anxiety (STAI-T) and depression (BDI-13) were examined. We observed associations between the proposed cog- nitive components of alexithymia (externally-oriented thinking) and that of empathy (perspective taking, fantasy) as well as empathic concern, which were insensitive to anxiety or depression. In contrast, asso- ciations between the proposed affective components of alexithymia (difficulty identifying feelings, diffi- culty describing feelings) and empathy (personal distress) were largely due to shared covariance with anxiety. A model encompassing an affective and a cognitive (including empathic concern) latent factors emerged, even after controlling for dysphoric affects. These findings suggest specific associations between cognitive and affective components of both constructs that were dissimilarly affected by anxiety and depression. 
The allocation of empathic concern to the cognitive factor is also discussed.}, author = {Grynberg, Delphine and Luminet, Olivier and Corneille, Olivier and Gr\`{e}zes, Julie and Berthoz, Sylvie}, doi = {10.1016/j.paid.2010.07.013}, file = {::}, issn = {01918869}, journal = {Personality and Individual Differences}, keywords = {Alexithymia,Anxiety,Depression,Empathy,IRI,TAS-20}, mendeley-tags = {Alexithymia,Anxiety,Depression,Empathy,IRI,TAS-20}, month = dec, number = {8}, pages = {845--850}, publisher = {Elsevier Ltd}, title = {{Alexithymia in the interpersonal domain: A general deficit of empathy?}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S019188691000365X}, volume = {49}, year = {2010} } @article{Gratch2006, author = {Gratch, Jonathan and Okhmatovskaia, Anna and Lamothe, Francois}, journal = {Intelligent Virtual}, title = {{Virtual rapport}}, url = {http://www.springerlink.com/index/k720537752657m81.pdf}, year = {2006} } @article{Lee2009, author = {Lee, Jina and Prendinger, H}, file = {::}, isbn = {9781424447992}, journal = {Affective Computing}, title = {{Learning models of speaker head nods with affective information}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5349543}, year = {2009} } @inproceedings{Jiang2007, author = {Jiang, Hong and Vidal, J.M. and Huhns, M.N.}, booktitle = {Proceedings of the 6th international joint conference on Autonomous agents and multiagent systems}, keywords = {agent architecture,belief-desire-intention,emotional agent}, pages = {11}, publisher = {ACM}, title = {{EBDI: an architecture for emotional agents}}, url = {http://dl.acm.org/citation.cfm?id=1329139}, year = {2007} } @article{Elliot2001, author = {Elliot, Andrew J. 
and McGregor, Holly A.}, file = {::}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {501--519}, title = {{A 2 x 2 achievement goal framework}}, volume = {80}, year = {2001} } @inproceedings{Gordon1985, abstract = {Despite the almost complete lack of research addressing a theoretical understanding of empathy or ways to increase human empathy, empathy is a central component of effective human communication. Seen as a key social science phenomenon, it is viewed, along with power, as an inextricable component of human dynamics, and, in its relationship with altruism, possibly plays a causal role. A problem with research on empathy has been a lack of conceptual clarity. Three ways to improve empathetic listening are to avoid judgment, give the speaker time to speak without interruption, and focus on the speaker. Many of the helping professions have attempted training programs aimed at increasing the empathetic communication skills of practitioners in these fields. However, being told to listen empathetically is not the same as being taught to listen with empathy; and in critique of the empathy skills programs that are conducted within the helping professions, a significantly raised test score does not mean that empathy has been attained. 
Although empathetic communication is a complex subject matter, skills associated with empathy and active listening have been perceived as being more important than skills associated with critical or deliberative listening.}, address = {Baguio, Philippines}, author = {Gordon, Ronald D.}, booktitle = {International Conference of the World Communication Association}, file = {::}, keywords = {Communication (thought transfer),empathhy,interpersonal communication,listening,listening habits,listening skills,speech communication}, pages = {1--16}, title = {{Empathy: The State of the Art and Science}}, year = {1985} } @article{Barrett2007, abstract = {Experiences of emotion are content-rich events that emerge at the level of psychological description, but must be causally constituted by neurobiological processes. This chapter outlines an emerging scientific agenda for understanding what these experiences feel like and how they arise. We review the available answers to what is felt (i.e., the content that makes up an experience of emotion) and how neurobiological processes instantiate these properties of experience. These answers are then integrated into a broad framework that describes, in psychological terms, how the experience of emotion emerges from more basic processes. We then discuss the role of such experiences in the economy of the mind and behavior.}, author = {Barrett, Lisa Feldman and Mesquita, Batja and Ochsner, Kevin N and Gross, James J}, doi = {10.1146/annurev.psych.58.110405.085709}, editor = {Meyers, Editor-in-Chief Robert A}, institution = {Department of Psychology, Boston College, Chestnut Hill, Massachusetts 02467, USA. 
barretli$\backslash$@bc.edu}, isbn = {9780122274107}, issn = {00664308}, journal = {Annual Review of Psychology}, keywords = {affect,consciousness,emotion}, number = {1}, pages = {373--403}, pmid = {17002554}, publisher = {Annual Reviews}, title = {{The Experience of Emotion}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17002554}, volume = {58}, year = {2007} } @incollection{Dautenhahn2002, author = {Dautenhahn, Kerstin and Bond, Alan and Ca\~{n}amero, Lola and Edmonds, Bruce}, booktitle = {Socially Intelligent Agents: Creating Relationships with Computers and Robots}, chapter = {1}, editor = {Dautenhahn, Kerstin and Bond, Alan and Ca\~{n}amero, Lola and Edmonds, Bruce}, file = {::}, isbn = {978-1-4020-7057-0}, pages = {1--20}, publisher = {Springer}, title = {{Creating Relationships with Computers and Robots}}, url = {http://www.springerlink.com/index/V38H434X220766G8.pdf}, year = {2002} } @inproceedings{Kang2008, author = {Kang, Sin-hwa and Gratch, Jonathan and Wang, Ning and Watt, J.H.}, booktitle = {Proceedings of the 7th international joint conference on Autonomous agents and multiagent systems-Volume 1}, file = {::}, keywords = {agents,contingency of nonverbal feedback,evaluation,rapport,social anxiety,virtual humans}, number = {Aamas}, pages = {120--127}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems}, title = {{Does the contingency of agents' nonverbal feedback affect users' social anxiety?}}, url = {http://dl.acm.org/citation.cfm?id=1402405}, year = {2008} } @article{Gray1985, abstract = {Attempts to show that the experimental psychology of the rat and the neuropsychology of the rat's brain are of relevance to clinical psychology. 
It is suggested that there is a false dichotomy between the behaviorist and cognitive approaches to psychology and illustrates this by going from a behaviorist analysis of a psychological concept (anxiety) to a cognitive analysis of that concept, basing the argument on brain research: Damage to the septo-hippocampal system mimics the behavioral effects of the antianxiety drugs. The reason for this mimicry is probably that these drugs reduce the noradrenergic input to the septo-hippocampal system. The noradrenergic input is normally activated under conditions of stress and serves to increase the capacity of the septo-hippocampal system to handle information. It seems probable, therefore, that the state of anxiety is, to some degree at least, mediated by activity in the septo-hippocampal system. It is emphasized that there is no dichotomy between cognitive and behaviorist psychology because the brain controls both behavior and cognition.}, author = {Gray, J. A}, journal = {Bulletin of the British Psychological Society}, pages = {99--112}, title = {{The whole and its parts: Behaviour, the brain, cognition and emotion}}, volume = {38}, year = {1985} } @article{Jaques2007, abstract = {In this article we describe the use of mental states approach, more specifically the belief-desire-intention (BDI) model, to implement the process of affective diagnosis in an educational environment. We use the psychological OCC model, which is based on the cognitive theory of emotions and is possible to be imple- mented computationally, in order to infer the learners emotions from his actions in the system interface. In our work we profit from the reasoning capacity of the BDI model in order to infer the students appraisal (a cognitive evaluation of a person that elicits an emotion), which allows us to deduce students emotions. The system reasons about an emotion-generating situation and tries to infer the users emotion by using the OCC model. 
Besides, the BDI model is very adequate to infer and also model students affective states since the emotions have a dynamic nature.}, author = {Jaques, Patricia Augustin and Vicari, Rosa Maria}, doi = {10.1016/j.compedu.2005.09.002}, file = {::}, issn = {03601315}, journal = {Computers \& Education}, keywords = {architectures for educational technology,computer,distance education and telelearning,human,intelligent tutoring systems,interactive learning environments,interface,media in education,system}, month = sep, number = {2}, pages = {360--384}, title = {{A BDI approach to infer student’s emotions in an intelligent learning environment}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0360131505001302}, volume = {49}, year = {2007} } @book{Izard1977, address = {New York}, author = {Izard, Carroll Ellis}, editor = {Izard, Carroll Ellis}, isbn = {9780306309861}, pages = {495}, publisher = {Plenum Press}, title = {{Human Emotions}}, year = {1977} } @article{Maurer1983, author = {Maurer, R.E. and Tindall, J.H.}, file = {::}, journal = {Journal of Counseling Psychology}, number = {2}, pages = {158}, publisher = {American Psychological Association}, title = {{Effect of postural congruence on client's perception of counselor empathy.}}, volume = {30}, year = {1983} } @article{Moridis2012a, abstract = {—Empathetic behavior has been suggested to be one effective way for Embodied Conversational Agents (ECAs) to provide feedback to learners’ emotions. An issue that has been raised is the effective integration of parallel and reactive empathy. The aim of this study is to examine the impact of ECAs’ emotional facial and tone of voice expressions combined with empathetic verbal behavior when displayed as feedback to students’ fear, sad, and happy emotions in the context of a self-assessment test. 
Three identical female agents were used for this experiment: 1) an ECA performing parallel empathy combined with neutral emotional expressions, 2) an ECA performing parallel empathy displaying emotional expressions that were relevant to the emotional state of the student, and 3) an ECA performing parallel empathy by displaying relevant emotional expressions followed by emotional expressions of reactive empathy with the goal of altering the student’s emotional state. Results indicate that an agent performing parallel empathy displaying emotional expressions relevant to the emotional state of the student may cause this emotion to persist. Moreover, the agent performing parallel and then reactive empathy appeared to be effective in altering an emotional state of fear to a neutral one.}, author = {Moridis, Christos N and Economides, Anastasios A and Member, Senior}, file = {::}, journal = {IEEE Transactions on Affective Computing}, keywords = {empathy,intelligent agents,user interfaces,—Computers and education}, number = {3}, pages = {260--272}, title = {{Affective Learning : Empathetic Agents with Emotional Facial and Tone of Voice Expressions}}, volume = {3}, year = {2012} } @incollection{Davis2006, address = {New York}, author = {Davis, Mark H.}, booktitle = {Handbook of the Socialogy of Emotions}, editor = {Stets, J. and Turner, J.}, publisher = {Springer Press}, title = {{Empathy}}, year = {2006} } @article{Prevost1994, abstract = {This paper presents a theory and a computational implementation for generating prosodically appropriate synthetic speech in response to database queries. Proper distinctions of contrast and emphasis are expressed in an intonation contour that is synthesized by rule under the control of a grammar, a discourse model, and a knowledge base. The theory is based on Combinatory Categorial Grammar, a formalism which easily integrates the notions of syntactic constituency, semantics, prosodic phrasing and information structure. 
Results from our current implementation demonstrate the system's ability to generate a variety of intonational possibilities for a given sentence depending on the discourse context.}, author = {Prevost, Scott and Steedman, Mark}, file = {::}, journal = {Speech Communication}, number = {1-2}, pages = {18}, publisher = {Citeseer}, title = {{Specifying Intonation from Context for Speech Synthesis}}, url = {http://arxiv.org/abs/cmp-lg/9407015}, volume = {15}, year = {1994} } @inproceedings{Dinda2007, abstract = {Experimental computer systems research typically ignores the end-user, modeling him, if at all, in overly simple ways. We argue that this (1) results in inadequate performance evaluation of the systems, and (2) ignores opportunities. We summarize our experiences with (a) directly evaluating user satisfaction and (b) incorporating user feedback in different areas of client/server computing, and use our experiences to motivate principles for that domain. Specifically, we report on user studies to measure user satisfaction with resource borrowing and with different clock frequencies in desktop computing, the development and evaluation of user interfaces to integrate user feedback into scheduling and clock frequency decisions in this context, and results in predicting user action and system response in a remote display system. We also present initial results on extending our work to user control of scheduling and mapping of virtual machines in a virtualization-based distributed computing environment. 
We then generalize (a) and (b) as recommendations for incorporating the user into experimental computer systems research.}, address = {New York, USA}, author = {Dinda, Peter A and Dick, Robert P and Rossoff, Samuel}, booktitle = {ExpCS '07 Proceedings of the 2007 workshop on Experimental computer science ACM}, doi = {10.1145/1281700.1281710}, file = {::}, isbn = {9781595937513}, keywords = {Autonomic Systems,Design,Experimentation,Human Directed Adaptation,Human Factors,Measurement,Performance,Speculative Remote Display,User Comfort With Resource Borrowing,User-driven Power Management,User-driven Scheduling}, number = {June}, pages = {1--12}, title = {{The User In Experimental Computer Systems Research}}, url = {http://dl.acm.org/citation.cfm?id=1281710}, year = {2007} } @article{Vanbaaren2004, author = {Van baaren, Rick B. and Holland, Rob W. and Kawakami, Kerry and Knippenberg, Ad Van}, doi = {10.1111/j.0963-7214.2004.01501012.x}, file = {::}, issn = {0956-7976}, journal = {Psychological Science}, month = jan, number = {1}, pages = {71--74}, title = {{Mimicry and Prosocial Behavior}}, url = {http://pss.sagepub.com/lookup/doi/10.1111/j.0963-7214.2004.01501012.x}, volume = {15}, year = {2004} } @article{Blairy1999, abstract = {Lipps (1907) presented a model of empathy which had an important influence on later formulations. According to Lipps, individuals tend to mimic an interaction partner's behavior, and this nonverbal mimicry induces—via a feedback process—the corresponding affective state in the observer. The resulting shared affect is believed to foster the understanding of the observed person's self. The present study tested this model in the context of judgments of emotional facial expressions. The results confirm that individuals mimic emotional facial expressions, and that the decoding of facial expressions is accompanied by shared affect. However, no evidence that emotion recognition accuracy or shared affect are mediated by mimicry was found. 
Yet, voluntary mimicry was found to have some limited influence on observer' s assessment of the observed person's personality. The implications of these results with regard to Lipps' original hypothesis are discussed.}, author = {Blairy, Sylvie and Herrera, Pedro and Hess, Ursula}, doi = {10.1023/A:1021370825283}, file = {::}, journal = {Journal of Nonverbal Behavior}, number = {1}, pages = {5--41}, title = {{Mimicry and the Judgment of Emotional Facial Expressions}}, url = {http://www.springerlink.com/content/unx02r46695w7651/ http://dx.doi.org/10.1023/A:1021370825283}, volume = {23}, year = {1999} } @inproceedings{Boukricha2011b, abstract = {Empathy is believed to play a major role as a basis for humans’ cooperative behavior. Recent research shows that humans empathize with each other to different degrees depending on several modulation factors including, among others, their social relationships, their mood, and the situational context. In human spatial interaction, partners share and sustain a space that is equally and exclusively reachable to them, the so-called interaction space. In a cooperative interaction scenario of relocating objects in interaction space, we introduce an approach for triggering and modulating a virtual humans cooperative spatial behavior by its degree of empathy with its interaction partner. That is, spatial distances like object distances as well as distances of arm and body movements while relocating objects in interaction space are modulated by the virtual human’s degree of empathy. 
In this scenario, the virtual human’s empathic emotion is generated as a hypothesis about the partner’s emotional state as related to the physical effort needed to perform a goal directed spatial behavior.}, address = {Berlin, Heidelberg}, author = {Boukricha, Hana and Nguyen, H.}, booktitle = {Proceedings of the 10th international conference on Intelligent virtual agents IVA'11}, doi = {10.1007/978-3-642-23974-8\_38}, editor = {Kopp, Stefan and Marsella, Stacy and Thorisson, Kristinn and Vilhjalmsson, Hannes}, pages = {350--362}, publisher = {Springer-Verlag}, title = {{Sharing Emotions and Space – Empathy as a Basis for Cooperative Spatial Interaction}}, url = {http://www.springerlink.com/content/q22784632u008337/}, year = {2011} } @incollection{Cassell1999a, abstract = {This paper addresses the problem of designing conversational agents that exhibit appropriate gaze behavior during dialogues with human users. Previous research on gaze behavior has concentrated on its relationship to turn-taking phenomena 4,5,6. Recent work has incorporated some of these findings into the design of autonomous human-like conversational agents and interactive communicative humanoids 1,14. However, none of this research has examined the relationship between information structure and gaze behavior. In this paper we discuss why turn- taking is not an adequate explanation for gaze behavior in conversation and why information structure should be integrated with turn-taking as an explanation for this behavior. We then examine the relationship of gaze behavior to information structure and turn-taking through an empirical analysis of discourse transcripts for several dyadic conversations. A simple algorithm for assigning gaze behavior is proposed on the basis of the findings of this empirical analysis. 
We describe work in progress on implementing this algorithm in an autonomous conversational humanoid agent with the goal of producing more natural gaze behavior related to propositional content in human- computer conversations.}, author = {Cassell, Justine and Torres, Obed E and Prevost, Scott}, booktitle = {Machine Conversations}, editor = {Wilks, Y}, pages = {143--154}, publisher = {Kluwer}, title = {{Turn Taking vs. Discourse Structure: How Best to Model Multimodal Conversation}}, url = {http://citeseer.ist.psu.edu/cassell98turn.html}, year = {1999} } @article{Larimer2009, abstract = {It is well established that college students have high rates of alcohol use and misuse and suffer the negative consequences of this behavior. Research evaluating the results of brief interventions with high-risk college students has shown these approaches to be successful in reducing alcohol con- sumption and/or related consequences. Several screening tools have been developed to detect the presence of problematic alcohol use and associated disorders, and some are designed specifically for use in a college student population. College campuses offer several opportunities to implement screening and interventions, including universal or large-scale assessments; health services, counsel- ing centers, or local emergency rooms; or via established judicial or grievance systems set up to deal with students who violate campus alcohol policies. Issues to consider when implementing screening and brief interventions in college populations include who should deliver the interventions—peer or professional counselors—and how students should be encouraged to participate in the interventions. 
Regardless of how the measures are implemented, the content and process of the brief interventions should be based on the available scientific evidence regarding established efficacious interventions.}, author = {Larimer, Mary E and Cronce, Jessica M and Lee, Christine M and Kilmer, Jason R}, file = {::}, journal = {Alcohol Research \& Health}, keywords = {AODD (alcohol and other drug use disorder),CAGE Questionnaire,Michigan Alcoholism Screening Test (MAST),Young Adult Alcohol Problems Screening Test (YAAPS,alcohol abuse,binge drinking,brief intervention,heavy drinking,identification and screening,interview,literature review,motivational interviewing,peer counseling,professional counseling,undergraduate student}, pages = {94--104}, title = {{Brief Intervention in College Settings}}, url = {http://pubs.niaaa.nih.gov/publications/arh28-2/94-104.htm}, volume = {28}, year = {2004} } @inproceedings{McQuiggan2008, abstract = {Humans continuously assess one another’s situational context, modify their own affective state, and then respond based on these outcomes through empathetic expression. Virtual agents should be capable of similarly empathizing with users in interactive environments. A key challenge posed by empathetic reasoning in virtual agents is determining whether to respond with parallel or reactive empathy. Parallel empathy refers to mere replication of another’s affective state, whereas reactive empathy exhibits greater cognitive awareness and may lead to incongruent emotional responses (i.e., emotions different from the recipient’s and perhaps intended to alter negative affect). This paper proposes a unified inductive framework for modeling parallel and reactive empathy. 
Empathy models are used to drive runtime situation-appropriate empathetic behaviors by selecting suitable parallel or reactive empathetic expressions.}, author = {McQuiggan, Scott W and Robison, Jennifer and Phillips, Robert}, booktitle = {Proceedings of the 7th international joint conference on Autonomous agents and multiagent systems}, file = {::}, number = {Aamas}, pages = {167--174}, title = {{Modeling parallel and reactive empathy in virtual agents: An inductive approach}}, year = {2008} } @article{Orozco2010, author = {Orozco, H. and Thalmann, Daniel and Ramos, F.}, file = {::}, journal = {Proceedings of 11th Computer Graphics International, CGI}, title = {{Making empathetic virtual humans in human–computer interaction scenarios}}, url = {http://cgi2010.miralab.unige.ch/short/SP09/SP09.pdf}, volume = {10}, year = {2010} } @article{Ambady1992, abstract = {A meta-analysis was conducted on the accuracy of predictions of various objective outcomes in the areas of social and clinical psychology from short observations of expressive behavior (under 5 min). The overall effect size (r) for the accuracy of predictions for 38 different results was .39. Studies using longer periods of behavioral observation did not yield greater predictive accuracy; predictions based on observations under 1/2 min in length did not differ significantly from predictions based on 4- and 5-min observations. The type of behavioral channel (such as the face, speech, the body, tone of voice) on which the ratings were based was not related to the accuracy of predictions. Accuracy did not vary significantly between behaviors manipulated in a laboratory and more naturally occurring behavior. 
Last, effect sizes did not differ significantly for predictions in the areas of clinical psychology, social psychology, and the accuracy of detecting deception.}, author = {Ambady, N and Rosenthal, Robert}, doi = {10.1037/0033-2909.111.2.256}, file = {::}, issn = {00332909}, journal = {Psychological Bulletin}, number = {2}, pages = {256--274}, publisher = {American Psychological Association}, title = {{Thin slices of expressive behavior as predictors of interpersonal consequences: A meta-analysis.}}, volume = {111}, year = {1992} } @article{Riek2009, author = {Riek, Laurel D. and Paul, Philip C. and Robinson, Peter}, doi = {10.1007/s12193-009-0028-2}, file = {::}, issn = {1783-7677}, journal = {Journal on Multimodal User Interfaces}, keywords = {19,affective computing,emotionally conveying,empathy,expressions,facial,forms of expressive empathy,human-robot interaction,is known as,of the most basic,one,social robotics,understand what others are}, month = nov, number = {1-2}, pages = {99--108}, title = {{When my robot smiles at me: Enabling human-robot rapport via real-time head gesture mimicry}}, url = {http://www.springerlink.com/index/10.1007/s12193-009-0028-2}, volume = {3}, year = {2009} } @article{ArthurJ.Clark2010, abstract = {Expanding on a framework introduced by Carl Rogers, an integral model of empathy in counseling uses empathic understanding through 3 ways of knowing: Subjective empathy enables a counselor to momentarily experience what it is like to be a client, interpersonal empathy relates to understanding a client's phenomenological experiencing, and objective empathy uses reputable knowledge sources outside of a client's frame of reference. Across the counseling process, empathy is integral to treatment strategies and interventions.}, author = {{Arthur J. 
Clark}}, journal = {Journal of Counseling \& Development}, keywords = {Counseling,Counseling Techniques,Counselor Client Relationship,Empathy,Models}, number = {3}, pages = {348--356}, title = {{Empathy: An integral model in the counseling process}}, url = {http://aca.metapress.com/link.asp?id=075658qt56l20466}, volume = {88}, year = {2010} } @article{Ekman1974, author = {Ekman, Paul and Friesen, Wallace V.}, file = {::}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {288--298}, title = {{Detecting Deception From The Body Or Face}}, volume = {29}, year = {1974} } @misc{Mota2003, abstract = {This paper presents a system for recognizing naturally occurring postures and associated affective states related to a child's interest level while performing a learning task on a computer. Postures are gathered using two matrices of pressure sensors mounted on the seat and back of a chair. Subsequently, posture features are extracted using a mixture of four gaussians, and input to a 3-layer feed-forward neural network. The neural network classifies nine postures in real time and achieves an overall accuracy of 87.6\% when tested with postures coming from new subjects. A set of independent Hidden Markov Models (HMMs) is used to analyze temporal patterns among these posture sequences in order to determine three categories related to a child's level of interest, as rated by human observers. 
The system reaches an overall performance of 82.3\% with posture sequences coming from known subjects and 76.5\% with unknown subjects.}, author = {Mota, Selene and Picard, Rosalind W}, booktitle = {2003 Conference on Computer Vision and Pattern Recognition Workshop}, doi = {10.1109/CVPRW.2003.10047}, institution = {IEEE}, isbn = {0769519008}, issn = {10636919}, pages = {49--49}, publisher = {IEEE}, title = {{Automated Posture Analysis for Detecting Learner's Interest Level}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4624309}, volume = {5}, year = {2003} } @article{DiMatteo1980, abstract = {The relationship between physicians' nonverbal communication skills (their ability to communicate and to understand facial expression, body movement and voice tone cues to emotion) and their patients' satisfaction with medical care was examined in 2 studies. The research involved 71 residents in internal medicine and 462 of their ambulatory and hospitalized patients. Standardized, reliable and valid measures of nonverbal communication skills were administered to the physicians. Their scores on these tests were correlated with ratings they received from a sample of their patients on measures of satisfaction with the technical aspects and the socioemotional aspects (or art) of the medical care they received. While the nonverbal communication skills of the physicians bore little relationship to patients' ratings of the technical quality of care, measures of these skills did predict patient satisfaction with the art of medical care received. Across both samples, physicians who were more sensitive to body movement and posture cues to emotion (the channel suggested by nonverbal researchers as the one in which true affect can be perceived) received higher ratings from their patients on the art of care than did less sensitive physicians. 
In addition, physicians who were successful at expressing emotion through their nonverbal communications tended to receive higher ratings from patients on the art of care than did physicians who were less effective communicators. The implications of successfully identifying characteristics of physicians with whom patients are satisfied are discussed.}, author = {DiMatteo, M R and Taranta, A and Friedman, H S and Prince, L M}, file = {::}, issn = {0025-7079}, journal = {Medical care}, keywords = {Adult,Consumer Satisfaction,Evaluation Studies as Topic,Female,Humans,Male,Nonverbal Communication,Physician-Patient Relations}, month = apr, number = {4}, pages = {376--387}, pmid = {7401698}, title = {{Predicting patient satisfaction from physicians' nonverbal communication skills}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/7401698}, volume = {18}, year = {1980} } @inproceedings{Krinidis2003, address = {Crete, Greece}, author = {Krinidis, Stelios and Buciu, Ioan and Pitas, Ioannis}, booktitle = {10th International Conference on Human- Computer Interaction (HCI'03)}, pages = {22--27}, title = {{Facial expression analysis and synthesis: A survey}}, url = {http://pdf.aminer.org/000/368/466/multiscale\_facial\_expression\_recognition\_using\_convolutional\_neural\_networks.pdf}, year = {2003} } @article{LeonA.1995, author = {Kappelman, Leon A.}, journal = {Data Base Advances}, number = {2 \& 3}, pages = {65--86}, title = {{Measuring User Involvement: A Diffusion of Innovation Perspective}}, volume = {26}, year = {1995} } @misc{TheMendeleySupportTeam2011, abstract = {A quick introduction to Mendeley. 
Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, author = {{The Mendeley Support Team}}, booktitle = {Mendeley Desktop}, keywords = {Mendeley,how-to,user manual}, pages = {1--16}, publisher = {Mendeley Ltd.}, title = {{Getting Started with Mendeley}}, url = {http://www.mendeley.com}, year = {2011} } @article{Spurgeon2010, abstract = {There has been a recent acceleration in the development and testing of programs for computer-assisted cognitive-behavioral therapy (CCBT). Programs are now available for treatment of depression, anxiety disorders, and other psychiatric conditions. Technology for delivery of CCBT includes multimedia programs, virtual reality, and handheld devices. Research on CCBT generally has supported the efficacy of computer-assisted therapy and has shown patient acceptance of computer tools for psychotherapy. Completion rates and treatment efficacy typically have been higher when clinicians prescribe and support the use of psychotherapeutic computer programs than when programs are delivered in a self-help format without clinician involvement. 
CCBT seems to have the potential to improve access to evidence-based therapies while reducing the demand for clinician time.}, author = {Spurgeon, Joyce a and Wright, Jesse H}, doi = {10.1007/s11920-010-0152-4}, file = {::}, isbn = {1192001001}, issn = {1535-1645}, journal = {Current psychiatry reports}, keywords = {Anxiety Disorders,Anxiety Disorders: therapy,Cognitive Therapy,Depressive Disorder,Depressive Disorder: therapy,Humans,Therapy, Computer-Assisted,Treatment Outcome}, month = dec, number = {6}, pages = {547--52}, pmid = {20872100}, title = {{Computer-assisted cognitive-behavioral therapy.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/20872100}, volume = {12}, year = {2010} } @article{Mendelson1999, author = {Mendelson, M.J. and Aboud, F.E.}, file = {::}, journal = {Canadian Journal of Behavioural Science/Revue canadienne des sciences du comportement}, number = {2}, pages = {130}, publisher = {Canadian Psychological Association}, title = {{Measuring friendship quality in late adolescents and young adults: McGill Friendship Questionnaires.}}, url = {http://psycnet.apa.org/journals/cbs/31/2/130/}, volume = {31}, year = {1999} } @article{Zeng2009, abstract = {Automated analysis of human affective behavior has attracted increasing attention from researchers in psychology, computer science, linguistics, neuroscience, and related disciplines. However, the existing methods typically handle only deliberately displayed and exaggerated expressions of prototypical emotions despite the fact that deliberate behaviour differs in visual appearance, audio profile, and timing from spontaneously occurring behaviour. To address this problem, efforts to develop algorithms that can process naturally occurring human affective behaviour have recently emerged. 
Moreover, an increasing number of efforts are reported toward multimodal fusion for human affect analysis including audiovisual fusion, linguistic and paralinguistic fusion, and multi-cue visual fusion based on facial expressions, head movements, and body gestures. This paper introduces and surveys these recent advances. We first discuss human emotion perception from a psychological perspective. Next we examine available approaches to solving the problem of machine understanding of human affective behavior, and discuss important issues like the collection and availability of training and test data. We finally outline some of the scientific and engineering challenges to advancing human affect sensing technology.}, author = {Zeng, Zhihong and Pantic, Maja and Roisman, Glenn I and Huang, Thomas S}, doi = {10.1109/TPAMI.2008.52}, file = {::}, issn = {0162-8828}, journal = {IEEE transactions on pattern analysis and machine intelligence}, keywords = {Affect,Affect: physiology,Algorithms,Artificial Intelligence,Automated,Automated: methods,Emotions,Emotions: physiology,Facial Expression,Monitoring,Pattern Recognition,Physiologic,Physiologic: methods,Sound Spectrography,Sound Spectrography: methods}, month = jan, number = {1}, pages = {39--58}, pmid = {19029545}, title = {{A survey of affect recognition methods: audio, visual, and spontaneous expressions.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19029545}, volume = {31}, year = {2009} } @book{Prendinger2004a, abstract = {Life-like characters is one of the most exciting technologies for human-computer interface applications today. They convincingly take the roles of virtual presenters, synthetic actors and sales personas, teammates and tutors. A common characteristic underlying their life-likeness or believability as virtual conversational partners is computational models that provide them with affective functions such as synthetic emotions and personalities and implement human interactive behavior. 
The wide dissemination of life-like characters in multimedia systems, however, will greatly depend on the availability of control languages and tools that facilitate scripting of intelligent conversational behaviour. This book presents the first comprehensive collection of the latest developments in scripting and representation languages for life-like characters, rounded off with an in-depth comparison and synopsis of the major approaches. Introducing toolkits for authoring animated characters further supports the ease of use of this new interface technology. Life-like characters being a vibrant research area, various applications have been designed and implemented. This book offers coverage of the most successful and promising applications, ranging from product presentation and student training to knowledge integration and interactive gaming. It also discusses the key challenges in the area and provides design guidelines for employing life-like characters.}, author = {{Helmut Prendinger}, Mitsuru Ishizuka}, isbn = {3540008675, 9783540008675}, publisher = {Springer}, title = {{Life-Like Characters: Tools, Affective Functions, and Applications}}, year = {2004} } @article{Dada2006, abstract = {Images: p1372-a:}, author = {Dada, Michael}, journal = {Journal of the National Medical Association}, number = {8}, pages = {1372}, publisher = {Motivate Healthy Habits}, title = {{Motivational Practice: Promoting Healthy Habits and Self-Care of Chronic Diseases}}, volume = {98}, year = {2006} } @article{Segal2011, author = {Segal, Elizabeth}, doi = {10.1080/01488376.2011.564040}, file = {::}, issn = {0148-8376}, journal = {Journal of Social Service Research}, keywords = {a dedication to justice,a nation that proclaims,and social well-being and,civic involvement,empathy,scapegoating,social empathy,social responsibility,the united states is}, month = may, number = {3}, pages = {266--277}, title = {{Social Empathy: A Model Built on Empathy, Contextual Understanding, and Social 
Responsibility That Promotes Social Justice}}, url = {http://www.informaworld.com/openurl?genre=article\&doi=10.1080/01488376.2011.564040\&magic=crossref||D404A21C5BB053405B1A640AFFD44AE3}, volume = {37}, year = {2011} } @inproceedings{Massimi2010, author = {Massimi, M. and Baecker, R.M.}, booktitle = {Proceedings of the 28th international conference on Human factors in computing systems}, file = {::}, pages = {1821--1830}, publisher = {ACM}, title = {{A death in the family: opportunities for designing technologies for the bereaved}}, url = {http://dl.acm.org/citation.cfm?id=1753600}, year = {2010} } @article{Devoldre2010, abstract = {Social support researchers and clinicians have repeatedly expressed the need to identify the antecedents of social support provision within close relationships. The aim of the present study is to investigate the extent to which individual differences in cognitive empathy (perspective taking) and affective empathy (empathic concern and personal distress) are predictive of social support provision in couples. Study 1 involved 83 female participants in a relatively young relationship; Study 2 involved 128 married couples. The authors used self-report measures in both studies to assess individual differences in empathy and participants' support provision behaviors. The main findings suggest a significant contribution of the different components of empathy with rather different pictures for each of these components. The authors discuss the present findings in light of existing theory and research on social support in relationships.}, author = {Devoldre, Inge and Davis, Mark H. 
and Verhofstadt, Lesley L and Buysse, Ann}, doi = {10.1080/00223981003648294}, file = {::}, issn = {0022-3980}, journal = {The Journal of psychology}, keywords = {80 and over,Adolescent,Adult,Affect,Aged,Empathy,Family Characteristics,Female,Humans,Individuality,Male,Middle Aged,Personal Construct Theory,Personality Inventory,Personality Inventory: statistics \& numerical data,Psychometrics,Social Support,Young Adult}, number = {3}, pages = {259--284}, pmid = {20461931}, title = {{Empathy and social support provision in couples: social support and the need to study the underlying processes.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21506454}, volume = {144}, year = {2010} } @book{Greene2003, abstract = {Providing a thorough review and synthesis of work on communication skills and skill enhancement, this "Handbook" serves as a comprehensive and contemporary survey of theory and research on social interaction skills. Editors John O. Greene and Brant R. Burleson have brought together preeminent researchers and writers to contribute to this volume, establishing a foundation on which future study and research will build. The handbook chapters are organized into five major units: general theoretical and methodological issues (models of skill acquisition, methods of skill assessment); fundamental interaction skills (both transfunctional and transcontextual); function-focused skills (informing, persuading, supporting); skills used in management of diverse personal relationships (friendships, romances, marriages); and skills used in varied venues of public and professional life (managing leading, teaching). Distinctive features of this handbook include: broad, comprehensive treatment of work on social interaction skills and skill acquisition; up-to-date reviews of research in each area; and emphasis on empirically supported strategies for developing and enhancing specific skills. 
Researchers in communication studies, psychology, family studies, business management, and related areas will find this volume a comprehensive, authoritative source on communications skills and their enhancement, and it will be essential reading for scholars and students across the spectrum of disciplines studying social interaction.}, author = {Greene, John O and Burleson, Brant Raney}, booktitle = {Communication}, editor = {Greene, John O and Burleson, Brant R}, isbn = {0805834176}, pages = {1051}, publisher = {Routledge}, title = {{Handbook of Communication and Social Interaction Skills}}, url = {http://books.google.com/books?id=B0BAStlhpWQC\&pgis=1}, year = {2003} } @article{Cowell2005, abstract = {For years, people have sought more natural means of communicating with their computers. Many have suggested that interaction with a computer should be as easy as interacting with other people, taking advantage of the multimodal nature of human communication. While users should, in theory, gravitate to such anthropomorphic embodiments, quite the contrary has been experienced; users generally have been dissatisfied and abandoned their use. This suggests a disconnect between factors that make human-human communication engaging and those used by designers to support human-agent interaction. This paper discusses a set of empirical studies that attempted to replicate human-human non-verbal behavior. The focus revolved around behaviors that portray a credible fa\c{c}ade, thereby helping embodied conversational agents (ECAs) to form a successful cooperative dyad with users. Based on a review of the non-verbal literature, a framework was created that identified trustworthy and credible non-verbal behaviors across five areas and formed design guidelines for character interaction. 
The design suggestions for those areas emanating from the facial region were experimentally supported but there was no concordant increase in perceived trust when bodily regions (posture, gesture) were added. In addition, in examining the importance of demographic elements in embodiment, it was found that users prefer to interact with characters that match their ethnicity and are young looking. There was no significant preference for gender. The implications of these results, as well as other interesting consequences are discussed.}, author = {Cowell, Andrew J. and Stanney, Kay M.}, doi = {10.1016/j.ijhcs.2004.11.008}, issn = {10715819}, journal = {International Journal of Human-Computer Studies - Special issue: Subtle expressivity for characters and robots}, keywords = {Anthropomorphic interfaces,Interface agents,Non-verbal behavior}, month = feb, number = {2}, pages = {281--306}, title = {{Manipulation of non-verbal interaction style and demographic embodiment to increase anthropomorphic computer character credibility}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1071581904001260 http://ocw.tudelft.nl/fileadmin/ocw/opener/Manipulation\_of\_non-verbal\_interaction\_style\_and\_demographic\_embodiment\_to\_increase\_anthropomorphic\_computer\_character\_credibility.pdf}, volume = {62}, year = {2005} } @article{Poh2010, author = {Poh, MZ and McDuff, DJ}, file = {::}, journal = {Optics Express}, number = {10}, pages = {10762--10774}, title = {{Non-contact, automated cardiac pulse measurements using video imaging and blind source separation}}, volume = {18}, year = {2010} } @article{Meijer1989, abstract = {The present study was designed to assess the contribution of general features of gross body movements to the attribution of emotions. Eighty-five adult subjects were shown ninety-six videotaped body movements, performed by three actors. 
Each movement was determined by seven general dimensions: trunk movement, arm movement, vertical direction, sagittal direction, force, velocity and directness. Using rating scales, the subjects judged the compatibility of each movement with each of twelve emotion categories. The results showed which movement features predicted particular ratings. Emotion categories differed as to the amount, type, and weights of predicting movement features. Three factors were extracted from the original ratings and interpreted as Rejection-Acceptance, Withdrawal-Approach, and Preparation-Defeatedness.}, author = {Meijer, Marco}, doi = {10.1007/BF00990296}, issn = {01915886}, journal = {Journal of Nonverbal Behavior}, number = {4}, pages = {247--268}, publisher = {Springer}, title = {{The contribution of general features of body movement to the attribution of emotions}}, url = {http://www.springerlink.com/index/10.1007/BF00990296}, volume = {13}, year = {1989} } @inproceedings{Gilroy2011, address = {New York, New York, USA}, author = {Gilroy, Stephen W. and Cavazza, Marc O. and Vervondel, Valentin}, booktitle = {Proceedings of the 16th international conference on Intelligent user interfaces - IUI '11}, doi = {10.1145/1943403.1943413}, isbn = {9781450304191}, pages = {53--62}, publisher = {ACM Press}, title = {{Evaluating multimodal affective fusion using physiological signals}}, url = {http://portal.acm.org/citation.cfm?doid=1943403.1943413}, year = {2011} } @article{Blairy1999, abstract = {Lipps (1907) presented a model of empathy which had an important influence on later formulations. According to Lipps, individuals tend to mimic an interaction partner's behavior, and this nonverbal mimicry induces—via a feedback process—the corresponding affective state in the observer. The resulting shared affect is believed to foster the understanding of the observed person's self. The present study tested this model in the context of judgments of emotional facial expressions. 
The results confirm that individuals mimic emotional facial expressions, and that the decoding of facial expressions is accompanied by shared affect. However, no evidence that emotion recognition accuracy or shared affect are mediated by mimicry was found. Yet, voluntary mimicry was found to have some limited influence on observer' s assessment of the observed person's personality. The implications of these results with regard to Lipps' original hypothesis are discussed.}, author = {Blairy, Sylvie and Herrera, Pedro and Hess, Ursula}, doi = {10.1023/A:1021370825283}, file = {::}, journal = {Journal of Nonverbal Behavior}, number = {1}, pages = {5--41}, title = {{Mimicry and the Judgment of Emotional Facial Expressions}}, url = {http://www.springerlink.com/content/unx02r46695w7651/ http://dx.doi.org/10.1023/A:1021370825283}, volume = {23}, year = {1999} } @book{C.E.Osgood1975, author = {{C.E. Osgood} and May, W.H. and Miron, M.S.}, publisher = {University of Illinois Press}, title = {{Cross-Cultural Universals of Affective Meaning}}, year = {1975} } @article{Pardas2002, abstract = {The video analysis system described in this paper aims at facial expression recognition consistent with the MPEG4 standardized parameters for facial animation, FAP. For this reason, two levels of analysis are necessary: low level analysis to extract the MPEG4 compliant parameters and high level analysis to estimate the expression of the sequence using these low level parameters. The low level analysis is based on an improved active contour algorithm that uses high level information based on Principal Component Analysis to locate the most significant contours of the face (eyebrows and mouth), and on motion estimation to track them. 
The high level analysis takes as input the FAP produced by the low level analysis tool and, by means of a Hidden Markov Model classifier, detects the expression of the sequence.}, author = {Pard\`{a}s, Montse and Bonafonte, Antonio}, doi = {10.1016/S0923-5965(02)00078-4}, file = {::}, issn = {09235965}, journal = {Signal Processing: Image Communication}, month = oct, number = {9}, pages = {675--688}, title = {{Facial animation parameters extraction and expression recognition using Hidden Markov Models}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0923596502000784}, volume = {17}, year = {2002} } @inproceedings{Polajnar2011, author = {Polajnar, Jernej and Dalvandi, B. and Polajnar, D.}, booktitle = {Cognitive Informatics \& Cognitive Computing (ICCI'CC'11), 2011 10th IEEE International Conference on}, file = {::}, isbn = {9781457716973}, pages = {96--102}, publisher = {IEEE}, title = {{Does empathy between artificial agents improve agent teamwork?}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6016126}, year = {2011} } 
@article{Lisetti2013, author = {Lisetti, Christine and Amini, Reza and Yasavur, Ugan and Rishe, Naphtali}, doi = {10.1145/2544103}, file = {::}, journal = {ACM Transactions on Management Information Systems}, number = {4}, pages = {1--28}, title = {{I Can Help You Change! An Empathic Virtual Agent Delivers Behavior Change Support}}, volume = {4}, year = {2013} } @inproceedings{Sourina2011a, abstract = {To make human computer interfaces more immersive and intuitive, a new dimension could be added. Real-time brain state recognition from EEG including emotion recognition and level of concentration recognition would make an access to information more adaptive and personalized. 
Modern EEG tech- niques give us an easy and portable way to monitor brain activities by using suitable signal processing and classification methods and algorithms. We pro- posed a new subject-dependent fractal-based approach to brain state recognition and innovative applications based on EEG-enable user’s interaction. The algo- rithms of the “inner” brain state quantification including emotion recognition would advance research on human computer interaction bringing the proposed novel objective quantification methods and algorithms as new tools in medical, entertainment, and even digital art methodology applications, and allowing us an integration of the brain state quantification algorithms in the human com- puter interfaces. In this paper, we describe our fractal-based approach to the brain state recognition and its EEG-enable applications such as serious games, emotional avatar, music therapy, music player, and storytelling.}, author = {Sourina, Olga and Liu, Yisi and Wang, Qiang and Nguyen, Minh Khoa}, booktitle = {Proceedings of the 6th international conference on Universal access in human-computer interaction: users diversity - Volume Part II (UAHCI'11)}, file = {::}, keywords = {BCI,HCI,emotion recognition,fractal dimension,music therapy,serious game,storytelling.}, pages = {591--599}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{EEG-Based Personalized Digital Experience}}, year = {2011} } @book{Davis1994, author = {Davis, Mark H.}, isbn = {0697168948}, publisher = {Westview Press}, title = {{Empathy: A social psychological approach}}, year = {1994} } @article{James1884, abstract = {The physiologists who , during the past few years , have been so industriously exploring the functions of the brain , have limited their attempts at explanation to its cognitive and volitional per- formances . 
Dividing the brain into sensorial and motor centers , they have found their division to be exactly paralleled by the analysis made by empirical psychology , of the perceptive and volitional parts of the mind into their simplest elements . But the aesthetic sphere of the mind , its longings , its pleasures and pains , and its emotions , have been so ignored in all these researches that one is tempted to suppose that if either Dr . Ferrier or Dr . Munk were asked for a theory in brain-terms of the latter mental facts , they might both reply , either that they had as yet bestowed no thought upon the subject , or that they had found it so difficult to make distinct hypotheses , that the matter lay for them among the problems of the future , only to be taken up after the simpler ones of the present should have been definitely solved . And yet it is even now certain that of two things concerning the emotions , one must be true . Either separate and special centers affected to them alone , are their brain-seat , or else they correspond to processes occurring in the motor and sensory centers , already assigned , or in others like them , not yet mapped out . If the for- mer be the case we must deny the current view , and hold the cortex to be something more than the surface of projection for every sensitive spot and every muscle in the body . If the latter be the case , we must ask whether the emotional process in the sensory or motor center be an altogether peculiar one , or whether it resembles the ordinary perceptive processes of which those centers are already recognized to be the seat . 
The purpose of the following pages is to show that the last alternative comes nearest to the truth , and that the emotional brain-processes not only}, author = {James, William}, chapter = {188}, issn = {00264423}, journal = {Mind}, number = {34}, pages = {188--205}, publisher = {JSTOR}, title = {{What is an Emotion?}}, url = {http://www.jstor.org/stable/2246769}, volume = {9}, year = {1884} } @inproceedings{Kang2008a, author = {Kang, Sin-hwa and Gratch, Jonathan and Wang, Ning and Watt, J.}, booktitle = {Intelligent Virtual Agents}, file = {::}, keywords = {evaluation,nonverbal feedback,personality,rapport,virtual agents}, pages = {253--261}, publisher = {Springer}, title = {{Agreeable people like agreeable virtual humans}}, url = {http://www.springerlink.com/index/DT61V8556710VW13.pdf}, year = {2008} } @article{Happ2011, author = {Happ, Christian and Melzer, Andr\'{e}}, file = {::}, journal = {IFIP International Federation for Information Processing}, keywords = {1,1 prosocial and antisocial,aggression,anderson and his colleagues,confirmed that video game,effects of video games,empathy,furthermore,in a recent overview,prosocial behavior,related to indicators of,video games,violence exposure is positively}, pages = {371--374}, title = {{Bringing Empathy into Play: On the Effects of Empathy in Violent and Nonviolent Video Games}}, url = {http://www.springerlink.com/index/P76556V1HN316RK6.pdf}, year = {2011} } 
@inproceedings{Pontier2008, abstract = {Previous research indicates that self-help therapy is an effective method to prevent and treat unipolar depression. While web-based self-help therapy has many advantages, there are also disadvantages to self-help therapy, such as that it misses the possibility to regard the body language of the user, and the lack of personal feedback on the user responses. This study presents a virtual agent that guides the user through the Beck Depression Inventory (BDI) questionnaire, which is used to measure the severity of depression. The agent responds empathically to the answers given by the user, by changing its facial expression. This resembles face to face therapy more than existing web-based self-help therapies. A pilot experiment indicates that the virtual agent has added value for this application.}, author = {Pontier, Matthijs and Siddiqui, Ghazanfar F}, booktitle = {Proceedings of the 8th international conference on Intelligent Virtual Agents (IVA)}, doi = {10.1007/978-3-540-85483-8\_42}, editor = {Prendinger, Helmut and Lester, James C. and Ishizuka, Mitsuru}, file = {::}, keywords = {Emotion modeling,Self-help therapy,Virtual agent}, pages = {417--425}, publisher = {Springer-Verlag Berlin Heidelberg}, title = {{A Virtual Therapist That Responds Empathically to Your Answers}}, year = {2008} } @article{Kim2004, abstract = {The present study attempted to develop new scales of patient-perceived, empathy-related constructs and to test a model of the relationships of physician empathy and related constructs to patient satisfaction and compliance. Five hundred fifty outpatients at a large university hospital in Korea were interviewed with the questionnaire. The data were analyzed using structural equation modeling. Patient-perceived physician empathy significantly influenced patient satisfaction and compliance via the mediating factors of information exchange, perceived expertise, inter-personal trust, and partnership. 
Improving physician empathic communication skills should increase patient satisfaction and compliance. Health providers who wish to improve patient satisfaction and compliance should first identify components of their empathic communication needing improvement and then try to refine their skills to better serve patients.}, author = {Kim, Sung Soo and Kaplowitz, Stan and Johnston, Mark V}, issn = {01632787}, journal = {Evaluation \& the Health Professions}, keywords = {communication,empathy,humans,korea,patient compliance,patient satisfaction,physician patient relations,questionnaires}, number = {3}, pages = {237--251}, title = {{The effects of physician empathy on patient satisfaction and compliance.}}, url = {http://ehp.sagepub.com/content/27/3/237.short}, volume = {27}, year = {2004} } @inproceedings{Dias2005, abstract = {Interactive virtual environments (IVEs) are now seen as an engaging new way by which children learn experimental sciences and other disciplines. These environments are populated by synthetic characters that guide and stimulate the children activities. In order to build such environments, one needs to address the problem of how achieve believable and empathic characters that act autonomously. Inspired by the work of traditional character animators, this paper proposes an architectural model to build autonomous characters where the agent’s reasoning and behaviour is influenced by its emotional state and personality. We performed a small case evaluation in order to determine if the characters evoked empathic reactions in the users with positive results.}, address = {Covilh\~{a}, Portugal}, author = {Dias, J. 
and Paiva, Ana}, booktitle = {EPIA 2005, 12th Portuguese Conference on Artificial Intelligence}, doi = {10.1007/11595014\_13}, editor = {Bento, Carlos and Cardoso, Am\'{\i}lcar and Dias, Ga\"{e}l}, file = {::}, pages = {127--140}, publisher = {Springer Berlin / Heidelberg}, title = {{Feeling and reasoning: A computational model for emotional characters}}, url = {http://www.springerlink.com/index/YQ18H62602413554.pdf}, year = {2005} } @article{Davis1983, author = {Davis, Mark H.}, doi = {10.1037/0022-3514.44.1.113}, file = {::}, issn = {0022-3514}, journal = {Journal of Personality and Social Psychology}, number = {1}, pages = {113--126}, title = {{Measuring individual differences in empathy: Evidence for a multidimensional approach.}}, volume = {44}, year = {1983} } @article{Liu2010, abstract = {Emotions accompany everyone in the daily life, playing a key role in non-verbal communication, and they are essential to the understanding of human behavior. Emotion recognition could be done from the text, speech, facial expression or gesture. In this paper, we concentrate on recognition of “inner” emotions from electroencephalogram (EEG) signals as humans could control their facial expressions or vocal intonation. The need and importance of the automatic emotion recognition from EEG signals has grown with increasing role of brain computer interface applications and development of new forms of human-centric and humandriven interaction with digital media. We propose fractal dimension based algorithm of quantification of basic emotions and describe its implementation as a feedback in 3D virtual environments. 
The user emotions are recognized and visualized in real time on his/her avatar adding one more so-called “emotion dimension” to human computer interfaces.}, author = {Liu, Yisi and Sourina, Olga and Nguyen, Minh Khoa}, doi = {10.1109/CW.2010.37}, file = {::}, isbn = {978-1-4244-8301-3}, journal = {2010 International Conference on Cyberworlds}, keywords = {BCI,EEG,HCI,emotion recognition,emotion visualization,fractal dimension}, month = oct, pages = {262--269}, publisher = {Ieee}, title = {{Real-Time EEG-Based Human Emotion Recognition and Visualization}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5656346}, year = {2010} } @article{Becker-Asano2009, abstract = {We introduce theWASABI ([W]ASABI [A]ffect [S]imulation for [A]gents with [B]elievable [I]nteractivity)Affect SimulationArchitecture, in which a virtual human’s cog- nitive reasoning capabilities are combined with simulated embodiment to achieve the sim- ulation of primary and secondary emotions. In modeling primary emotions we follow the idea of “Core Affect” in combination with a continuous progression of bodily feeling in three-dimensional emotion space (PADspace), that is subsequently categorized into discrete emotions. In humans, primary emotions are understood as onto-genetically earlier emotions, which directly influence facial expressions. 
Secondary emotions, in contrast, afford the abil- ity to reason about current events in the light of experiences and expectations.}, author = {Becker-Asano, Christian and Wachsmuth, Ipke}, doi = {10.1007/s10458-009-9094-9}, file = {::}, issn = {1387-2532}, journal = {Autonomous Agents and Multi-Agent Systems}, keywords = {affect simulation,affective computing,affective gaming,architecture,aware emotions,bdi-based architecture,embodied agent,emotion dynamics,emotion expression,emotion modeling,pad emotion space,primary and secondary emotions,reality,virtual,virtual human}, month = may, number = {1}, pages = {32--49}, title = {{Affective computing with primary and secondary emotions in a virtual human}}, url = {http://www.springerlink.com/index/10.1007/s10458-009-9094-9}, volume = {20}, year = {2009} } @incollection{Battista2000, abstract = {MPEG-4 (formally ISO/IEC international standard 14496) defines a multimedia system for the interoperable communication of complex scenes containing audio, video, synthetic audio and graphics material. 
In this article, we provide a comprehensive overview of the technical elements of the Moving Pictures Expert Group's MPEG-4 multimedia system specification}, author = {Battista, S and Casalino, F and Lande, C}, booktitle = {IEEE Multimedia}, doi = {10.1109/93.839314}, issn = {1070986X}, number = {1}, pages = {74--83}, publisher = {IEEE}, title = {{MPEG-4: a multimedia standard for the third millennium, Part 1.}}, volume = {7}, year = {2000} } @article{Archer1977, author = {Archer, Dane and Akert, Robin M}, journal = {Journal of Personality and Social Psychology}, number = {6}, pages = {443--449}, title = {{Words and everything else: Verbal and nonverbal cues in social interpretation}}, volume = {35}, year = {1977} } @article{Roberts1996, author = {Roberts, William and Strayer, Janet}, doi = {10.2307/1131826}, file = {::}, issn = {00093920}, journal = {Child Development}, month = apr, number = {2}, pages = {449}, title = {{Empathy, Emotional Expressiveness, and Prosocial Behavior}}, url = {http://www.jstor.org/stable/1131826?origin=crossref}, volume = {67}, year = {1996} } @inproceedings{DeCarolis2010, abstract = {As far as interaction is concerned Ambient Intelligence (AmI) research emphasizes the need of natural and friendly interfaces for accessing services provided by the environment. In this paper we present the result of an experimental study aiming at understanding whether Embodied Conversational Agents (ECAs) and Social Robots may improve the naturalness and effectiveness of interaction by playing different roles when acting as interface between users and smart environment services. Results obtained so far show that ECAs seem to have a better evaluation than robots for information related tasks. On the other side, Social Robots are preferred for welcoming people and for guiding them in the smart environment, due to their possibility to move and to the perceived sense of presence. 
Moreover, the robot seems to elicit a more positive evaluation in terms of user experience.}, address = {New York, New York, USA}, author = {{De Carolis}, Berardina and Mazzotta, Irene and Novielli, Nicole and Pizzutilo, Sebastiano}, booktitle = {Proceedings of the International Conference on Advanced Visual Interfaces - AVI '10}, doi = {10.1145/1842993.1843041}, file = {::}, isbn = {9781450300766}, keywords = {animated interfaces,interface evaluation}, pages = {275--278}, publisher = {ACM Press}, title = {{Social robots and ECAs for accessing smart environments services}}, url = {http://portal.acm.org/citation.cfm?doid=1842993.1843041}, year = {2010} } @inproceedings{Morency2008, abstract = {During face-to-face interactions, listeners use backchannel feedback such as head nods as a signal to the speaker that the communication is working and that they should continue speaking. Predicting these backchannel opportunities is an important milestone for building engaging and natural virtual humans. In this paper we show how sequential probabilistic models (e.g., Hidden Markov Model or Conditional Random Fields) can automatically learn from a database of human-to-human interactions to predict listener backchannels using the speaker multimodal output features (e.g., prosody, spoken words and eye gaze). The main challenges addressed in this paper are automatic selection of the relevant features and optimal feature representation for probabilistic models. 
For prediction of visual backchannel cues (i.e., head nods), our prediction model shows a statistically significant improvement over a previously published approach based on hand-crafted rules.}, address = {Tokyo, Japan}, author = {Morency, Louis-Philippe and de Kok, Iwan and Gratch, Jonathan}, booktitle = {8th International Conference on Intelligent Virtual Agents (IVA'08)}, doi = {10.1007/978-3-540-85483-8\_18}, file = {::}, pages = {176--190}, publisher = {Springer Berlin Heidelberg}, title = {{Predicting listener backchannels: A probabilistic multimodal approach}}, url = {http://www.springerlink.com/index/180267KR7P8PT321.pdf}, year = {2008} } @article{Zhang2009, abstract = {This paper presents a new anthropometrics-based method for generating realistic, controllable face models. Our method establishes an intuitive and efficient interface to facilitate procedures for interactive 3D face modeling and editing. It takes 3D face scans as examples in order to exploit the variations presented in the real faces of individuals. The system automatically learns a model prior from the data-sets of example meshes of facial features using principal component analysis (PCA) and uses it to regulate the naturalness of synthesized faces. For each facial feature, we compute a set of anthropometric measurements to parameterize the example meshes into a measurement space. Using PCA coefficients as a compact shape representation, we formulate the face modeling problem in a scattered data interpolation framework which takes the user-specified anthropometric parameters as input. Solving the interpolation problem in a reduced subspace allows us to generate a natural face shape that satisfies the user-specified constraints. 
At runtime, the new face shape can be generated at an interactive rate.We demonstrate the utility of our method by presenting several applications, including analysis of facial features of subjects in different race groups, facial feature transfer, and adapting face models to a particular population group.}, author = {Zhang, Yu and Prakash, Edmond C.}, doi = {10.1155/2009/573924}, issn = {1687-7047}, journal = {International Journal of Computer Games Technology}, pages = {1--15}, title = {{Face to Face: Anthropometry-Based Interactive Face Shape Modeling Using Model Priors}}, url = {http://www.hindawi.com/journals/ijcgt/2009/573924/}, volume = {2009}, year = {2009} } @incollection{Hoffman1987, address = {Cambridge}, author = {Hoffman, Martin L.}, booktitle = {Empathy and its development}, editor = {Eisenberg, N. and Strayer, J.}, pages = {47--80}, publisher = {Cambridge University Press.}, title = {{The contribution of empathy to justice and moral judgment}}, year = {1987} } @phdthesis{Beutl2011, author = {Beutl, Leon}, file = {::}, school = {University of Wien}, title = {{A simulation for the creation of soft-looking, realistic facial expressions}}, type = {Master Thesis}, year = {2011} } @book{Hojat2007, author = {Hojat, Mohammadreza}, booktitle = {Patient Care}, isbn = {9780387336077}, publisher = {New York, NY: Springer}, title = {{Empathy in patient care: antecedents, development, measurement, and outcomes}}, year = {2007} } @article{Berry1997, author = {Berry, D S and Pennebaker, James W and Mueller, J S and Hiller, W S}, doi = {10.1177/0146167297235008}, issn = {01461672}, journal = {Personality and Social Psychology Bulletin}, number = {5}, pages = {526--537}, title = {{Linguistic Bases of Social Perception}}, url = {http://psp.sagepub.com/cgi/doi/10.1177/0146167297235008}, volume = {23}, year = {1997} } @inproceedings{Gama2011, abstract = {Over the last decade extensive research has been conducted in the area of conversational agents focusing in many 
different aspects of these agents. In this research, and aiming at building agents that maintain a social connection with users, empathy has been one of those areas, as it plays a leading role in the establishment of social relationships. In this paper we present a relationship model of empathy that takes advantage of Social Penetration Theory's concepts for relationship building. This model has been implemented into an agent that attempts to establish a relationship with the user, expressing empathy both verbally and visually. The visual expression of empathy consists of facial expression and physical proximity representation. The user tests performed showed that while users were able to develop a simple relationship with the agents, they however developed stronger relationships with a version of the agent that is most visually expressive and takes advantage of the proximity element, confirming the significance of our model based on social penetration theory may have and, consequently, the importance of the visual representation of empathic responses.}, address = {Memphis, TN, USA}, author = {Gama, Sandra and Barata, Gabriel and Gon\c{c}alves, D. and Prada, R. and Paiva, Ana}, booktitle = {ACII'11 Proceedings of the 4th international conference on Affective computing and intelligent interaction - Volume Part I}, doi = {10.1007/978-3-642-24600-5\_54}, editor = {D'Mello, Sidney K. and Graesser, Arthur C. and Schuller, Bj\"{o}rn and Martin, Jean-Claude}, file = {::}, keywords = {affective computing,conversational agent,empathic agent}, pages = {507--516}, publisher = {Springer Berlin / Heidelberg}, title = {{SARA: social affective relational agent: a study on the role of empathy in artificial social agents}}, url = {http://www.springerlink.com/content/g0433kx744258w62/}, year = {2011} } @article{Woods1970, abstract = {The use of augmented transition network grammars for the analysis of natural language sentences is described. 
Struc- ture-building actions associated with the arcs of the gram- mar network allow for the reordering, restructuring, and copy- ing of constituents necessary to produce deep-structure repre- sentations of the type normally obtained from a transforma- tional analysis, and conditions on the arcs allow for a powerful selectivity which can rule out meaningless analyses and take advantage of semantic information to guide the parsing. The advantages of this model for natural language analysis are discussed in detail and illustrated by examples. An imple- mentation of an experimental parsing system for transition network grammars is briefly described.}, author = {Woods, W A}, doi = {10.1145/355598.362773}, editor = {Grosz, Barbara and Jones, Karen and Webber, Bonnie}, file = {::}, issn = {00010782}, journal = {Communications of the ACM}, number = {10}, pages = {591--606}, publisher = {ASSOC COMPUTING MACHINERY}, title = {{Transition Network Grammars for Natural Language Analysis}}, url = {http://portal.acm.org/citation.cfm?doid=355598.362773}, volume = {13}, year = {1970} } @article{Rogers1957, author = {Rogers, C R}, editor = {Kirschenbaum, H}, isbn = {9780395483572}, issn = {00958891}, journal = {Journal of Consulting Psychology}, number = {2}, pages = {95--103}, pmid = {13416422}, publisher = {Houghton Mifflin}, title = {{The necessary and sufficient conditions of therapeutic personality change}}, volume = {21}, year = {1957} } @incollection{WeinerBGraham1984, address = {New York}, author = {{Weiner B Graham}, S}, booktitle = {Emotions, cognition, and behavior}, editor = {Izard, Carroll E and Kagan, J and Zajonc, Robert B}, pages = {167--191}, publisher = {Cambridge University Press}, title = {{An attributional approach to emotional development}}, year = {1984} } @article{Rabiner1989, author = {Rabiner, Lawrence R.}, file = {::}, journal = {Proceedings of the IEEE}, number = {2}, pages = {257--286}, title = {{A tutorial on hidden Markov models and selected 
applications in speech recognition}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=18626}, volume = {77}, year = {1989} } @article{Gupta2012, author = {Gupta, Prabodh and Jhala, Darshana and Jhala, Nirag}, doi = {10.1309/AJCPLAE62CRYYXNW}, file = {::}, issn = {1943-7722}, journal = {American journal of clinical pathology}, month = jan, number = {1}, pages = {160}, pmid = {22180490}, title = {{Book review.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/22180490}, volume = {137}, year = {2012} } @article{Cooper2003, abstract = {BACKGROUND: African-American patients who visit physicians of the same race rate their medical visits as more satisfying and participatory than do those who see physicians of other races. Little research has investigated the communication process in race-concordant and race-discordant medical visits. OBJECTIVES: To compare patient-physician communication in race-concordant and race-discordant visits and examine whether communication behaviors explain differences in patient ratings of satisfaction and participatory decision making. DESIGN: Cohort study with follow-up using previsit and postvisit surveys and audiotape analysis. SETTING: 16 urban primary care practices. PATIENTS: 252 adults (142 African-American patients and 110 white patients) receiving care from 31 physicians (of whom 18 were African-American and 13 were white). MEASUREMENTS: Audiotape measures of patient-centeredness, patient ratings of physicians' participatory decision-making styles, and overall satisfaction. RESULTS: Race-concordant visits were longer (2.15 minutes 95\% CI, 0.60 to 3.71) and had higher ratings of patient positive affect (0.55 point, 95\% CI, 0.04 to 1.05) compared with race-discordant visits. Patients in race-concordant visits were more satisfied and rated their physicians as more participatory (8.42 points 95\% CI, 3.23 to 13.60). 
Audiotape measures of patient-centered communication behaviors did not explain differences in participatory decision making or satisfaction between race-concordant and race-discordant visits. CONCLUSIONS: Race-concordant visits are longer and characterized by more patient positive affect. Previous studies link similar communication findings to continuity of care. The association between race concordance and higher patient ratings of care is independent of patient-centered communication, suggesting that other factors, such as patient and physician attitudes, may mediate the relationship. Until more evidence is available regarding the mechanisms of this relationship and the effectiveness of intercultural communication skills programs, increasing ethnic diversity among physicians may be the most direct strategy to improve health care experiences for members of ethnic minority groups.}, author = {Cooper, Lisa A and Roter, Debra L and Johnson, Rachel L and Ford, Daniel E and Steinwachs, Donald M and Powe, Neil R}, institution = {Johns Hopkins University School of Medicine and the Welch Center for Prevention, Epidemiology, and Clinical Research, Johns Hopkins University, Baltimore, Maryland 21205-2223, USA. lisa.cooper@jhmi.edu}, journal = {Annals of Internal Medicine}, keywords = {empirical approach,professional patient relationship}, number = {11}, pages = {907--915}, pmid = {14644893}, title = {{Patient-centered communication, ratings of care, and concordance of patient and physician race.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/14644893}, volume = {139}, year = {2003} } @article{Ekman1993, abstract = {Cross-cultural research on facial expression and the developments of methods to measure facial expression are briefly summarized. What has been learned about emotion from this work on the face is then elucidated. Four questions about facial expression and emotion are discussed: What information does an expression typically convey? 
Can there be emotion without facial expression? Can there be a facial expression of emotion without emotion? How do individuals differ in their facial expressions of emotion?}, author = {Ekman, Paul}, institution = {Human Interaction Laboratory, University of California, San Francisco 94143.}, journal = {American Psychologist}, number = {4}, pages = {384--392}, pmid = {8512154}, publisher = {American Psychological Association}, title = {{Facial expression and emotion.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8512154}, volume = {48}, year = {1993} } @article{Moridis2012a, abstract = {—Empathetic behavior has been suggested to be one effective way for Embodied Conversational Agents (ECAs) to provide feedback to learners’ emotions. An issue that has been raised is the effective integration of parallel and reactive empathy. The aim of this study is to examine the impact of ECAs’ emotional facial and tone of voice expressions combined with empathetic verbal behavior when displayed as feedback to students’ fear, sad, and happy emotions in the context of a self-assessment test. Three identical female agents were used for this experiment: 1) an ECA performing parallel empathy combined with neutral emotional expressions, 2) an ECA performing parallel empathy displaying emotional expressions that were relevant to the emotional state of the student, and 3) an ECA performing parallel empathy by displaying relevant emotional expressions followed by emotional expressions of reactive empathy with the goal of altering the student’s emotional state. Results indicate that an agent performing parallel empathy displaying emotional expressions relevant to the emotional state of the student may cause this emotion to persist. 
Moreover, the agent performing parallel and then reactive empathy appeared to be effective in altering an emotional state of fear to a neutral one.}, author = {Moridis, Christos N. and Economides, Anastasios A.}, file = {::}, journal = {IEEE Transactions on Affective Computing}, keywords = {empathy,intelligent agents,user interfaces,—Computers and education}, number = {3}, pages = {260--272}, title = {{Affective Learning : Empathetic Agents with Emotional Facial and Tone of Voice Expressions}}, volume = {3}, year = {2012} } @book{Widmark1981, address = {Davis, California}, author = {Widmark, Erik Matteo Prochet}, isbn = {0931890071, 9780931890079}, pages = {163}, publisher = {Biomedical Publications}, title = {{Principles and Applications of Medicolegal Alcohol Determination}}, year = {1981} } @inproceedings{Boukricha2007, abstract = {Addressing user’s emotions in human-computer interaction significantly enhances the believability and lifelikeness of virtual humans. Emotion recognition and interpretation is realized in our approach by integrating empathy as a designated process within the agent’s cognitive architecture. 
In this paper we describe this empathy process which comprises of two interconnected components: a belief-desire-intention (BDI) based cognitive component and an affective component based on the emotion simulation system of the virtual human Max.}, address = {Osnabr\"{u}ck, Germany}, author = {Boukricha, Hana and Becker-Asano, Christian}, booktitle = {Proceedings of the 2nd Workshop at KI2007 on Emotion and Computing – Current Research and Future Impact}, editor = {{Dirk Reichardt} and Levi, Paul}, pages = {23--28}, title = {{Simulating empathy for the virtual human max}}, url = {http://wwwlehre.dhbw-stuttgart.de/~reichard/itemotion/2007/}, year = {2007} } @article{Boucouvalas2003, abstract = {In this work we focus on demonstrating a real time communication interface which enhances text communication by detecting from real time typed text, the extracted emotions, and displaying on the screen appropriate facial expression images in real time. The displayed expressions are represented in terms of expressive images or sketches of the communicating persons. This interface makes use of a developed real time emotion extraction engine from text. The emotion extraction engine and extraction rules are discussed together with a description of the interface, its limits and future direction of such interface. The extracted emotions are mapped into displayed facial expressions. Such interface can be used as a platform for a number of future CMC experiments. The developed online communication interface brings together remotely located collaborating parties in a shared electronic spacefor their communication. In its current state the interface allows the participant to see at a glance all other online participants and all those who are engaged in communications. An important aspect of the interface is that for two users engaged in communication, the interface locally extracts emotional states from the content of typed textual sentences automatically. 
Subsequently it displays discrete expressions mapped from extracted emotions to the remote screen of the other person. It also analyses/extracts the intensity/duration of the emotional state. At the same time the users can also control their expression, if they wish, manually. The interface also uses text to speech synthesis, which allows the user to glance on other tasks while at the same time listening to the communication. A shared whiteboard also allows the users to engage in collaborative work. Finally it is also possible to view your own expression (feedback) which is displayed and viewed by the other user, an add on feature not possible with face to face communication between two people.}, author = {Boucouvalas, Anthony C}, chapter = {21}, editor = {Riva, G and Davide, F and Jsselsteijn, W A I}, journal = {Emotion}, pages = {305--318}, publisher = {Ios Press}, title = {{Real Time Text-to-Emotion Engine for Expressive Internet Communications}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:Real+Time+Text-to-Emotion+Engine+for+Expressive+Internet+Communications\#1}, volume = {5}, year = {2003} } @article{Yacoub2003, author = {Yacoub, Sherif and Simske, Steve and Lin, Xiaofan and Burns, John}, journal = {8th European Conference on Speech Communication and Technology}, number = {September}, pages = {1--4}, title = {{Recognition of emotions in interactive voice response systems}}, url = {http://www.isca-speech.org/archive/eurospeech\_2003/e03\_0729.html}, year = {2003} } @article{Scherer2005, abstract = {Defining emotion is a notorious problem. Without consensual conceptualization and operationalization of exactly what phenomenon is to be studied, progress in theory and research is difficult to achieve and fruitless debates are likely to proliferate. A particularly unfortunate example is William Jamess asking the question What is an emotion? 
when he really meant feeling, a misnomer that started a debate which is still ongoing, more than a century later. This contribution attempts to sensitize researchers in the social and behavioral sciences to the importance of definitional issues and their consequences for distinguishing related but fundamentally different affective processes, states, and traits. Links between scientific and folk concepts of emotion are explored and ways to measure emotion and its components are discussed.}, author = {Scherer, Klaus R.}, doi = {10.1177/0539018405058216}, issn = {05390184}, journal = {Social Science Information}, keywords = {affective processes,emotion,feeling,folk concepts emotion,measurement emotion,scientific concepts emotion}, number = {4}, pages = {695--729}, publisher = {Sage Publications}, title = {{What are emotions? And how can they be measured?}}, url = {http://ssi.sagepub.com/cgi/doi/10.1177/0539018405058216}, volume = {44}, year = {2005} } @inproceedings{Smith2010, abstract = {The development of Embodied Conversational Agents (ECA) as Companions brings several challenges for both affective and conversational dialogue. These include challenges in generating appropriate affective responses, selecting the overall shape of the dialogue, providing prompt system response times and handling interruptions. We present an implementation of such a Companion showing the development of individual modules that attempt to address these challenges. Further, to resolve resulting conflicts, we present encompassing interaction strategies that attempt to balance the competing requirements. 
Finally, we present dialogues from our working prototype to illustrate these interaction strategies in operation.}, author = {Smith, Cameron and Crook, Nigel and Boye, Johan and Charlton, Daniel and Dobnik, Simon and Pizzi, David and Cavazza, Marc and Pulman, Stephen}, booktitle = {IVA'10 Proceedings of the 10th international conference on Intelligent virtual agents}, file = {::}, keywords = {affective dialogue,companion,conversational dialogue,embodied conversational agents,interaction strategies,interruptions}, pages = {301--314}, publisher = {Springer-Verlag Berlin, Heidelberg}, title = {{Interaction Strategies for an Affective Conversational Agent}}, year = {2010} } @article{Tickle-Degnen1990, abstract = {The purpose of this article is to offer a conceptualization of rapport that has utility for identifiing the nonverbal correlates associated with rapport. We describe the nature of rapport in terms of a dynamic structure of three interrelating components: mutual attentiveness, positivity, and coor- dination. We propose that the relative weighting of these components in the experience of rapport changes over the course of a developing relationship between individuals. In early interactions, positivity and attentiveness are more heavily weighted than coordination, whereas in later interactions, coordination and attentiveness are the more heavily weighted components. Because of the gestalt nature of the experience of rapport, it is not easy to identifi nonverbal behavioral correlates of the components. We discuss two approaches to nonverbal measurement, molecular and molar, along with recommendations for their appropriate application in the study of rapport at different stages of an interpersonal relationship. 
We present a meta-analytic study that demon- strates the effect of nonverbal behavior, measured at the molecular level, on the positivity component of rapport, and we conclude with an outline of hypotheses relevant to the investigation of the nonverbal correlates of rapport.}, author = {Tickle-Degnen, L. and Rosenthal, Robert}, file = {::}, journal = {Psychological Inquiry}, number = {4}, pages = {285--293}, publisher = {Taylor \& Francis}, title = {{The nature of rapport and its nonverbal correlates}}, url = {http://www.tandfonline.com/doi/abs/10.1207/s15327965pli0104\_1}, volume = {1}, year = {1990} } @misc{Lisetti2004, abstract = {The development of an autonomous social robot, Cherry, is occurring in tandem with studies gaining potential user preferences, likes, dislikes, and perceptions of her features. Thus far, results have indicated that individuals 1) believe that service robots with emotion and personality capabilities would make them more acceptable in everyday roles in human life, 2) prefer that robots communicate via both human-like facial expressions, voice, and text-based media, 3) become more positive about the idea of service and social robots after exposure to the technology, and 4) find the appearance and facial features of Cherry pleasing. 
The results of these studies provide the basis for future research efforts, which are discussed.}, author = {Lisetti, Christine L and Brown, S M Brown S M and Alvarez, K Alvarez K and Marpaung, A H Marpaung A H}, booktitle = {IEEE Transactions on Systems Man and Cybernetics Part C Applications and Reviews}, doi = {10.1109/TSMCC.2004.826278}, issn = {10946977}, number = {2}, pages = {195--209}, publisher = {IEEE}, title = {{A social informatics approach to human-robot interaction with a service social robot}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1291667}, volume = {34}, year = {2004} } @book{Mowrer1960, address = {New York}, author = {Mowrer, Orval Hobart}, pages = {555}, publisher = {Wiley}, title = {{Learning theory and behavior}}, year = {1960} } @article{Mendelson1999, author = {Mendelson, M.J. and Aboud, F.E.}, file = {::}, journal = {Canadian Journal of Behavioural Science/Revue canadienne des sciences du comportement}, number = {2}, pages = {130}, publisher = {Canadian Psychological Association}, title = {{Measuring friendship quality in late adolescents and young adults: McGill Friendship Questionnaires.}}, url = {http://psycnet.apa.org/journals/cbs/31/2/130/}, volume = {31}, year = {1999} } @article{Ward2000, abstract = {Back-channel feedback, responses such as uh-uh from a listener, is a pervasive feature of conversation. It has long been thought that the production of back-channel feedback depends to a large extent on the actions of the other conversation partner, not just on the volition of the one who produces them. In particular, prosodic cues from the speaker have long been thought to play a role, but have so far eluded identification. We have earlier suggested that an important prosodic cue involved, in both English and Japanese, is a region of low pitch late in an utterance (Ward, 1996). 
This paper discusses issues in the definition of back-channel feedback, presents evidence for our claim, surveys other factors which elicit or inhibit back-channel responses, and mentions a few related phenomena and theoretical issues. (C) 2000 Elsevier Science B.V. All rights reserved.}, author = {Ward, Nigel and Tsukahara, Wataru}, doi = {10.1016/S0378-2166(99)00109-5}, issn = {03782166}, journal = {Journal of Pragmatics}, number = {8}, pages = {1177--1207}, publisher = {Elsevier}, title = {{Prosodic features which cue back-channel responses in English and Japanese}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0378216699001095}, volume = {32}, year = {2000} } @article{Cliffordson2002, abstract = {The purpose of the present study was to examine the structure of empathy using a hierarchical approach, and to compare the dimensions of empathy with measures of social functioning, in order to contribute to the understanding of the nature of empathy. The dimensionality of the Interpersonal Reactivity Index, which comprises four subscales (empathic concern, perspective taking, fantasy and personal distress) was examined using confirmatory factor analysis. Relations with the Social Skills Inventory were also investigated. A sample of 127 applicants for places on nursing and social work undergraduate programs participated in the study. The study findings indicate that empathy is hierarchically organized, with one general dimension at the apex. The general factor is identical to empathic concern and this dimension overlaps to a great extent with perspective taking and fantasy. 
The findings also indicate that the general dimension constitutes an integrated entirety, with its main emphasis on emotional reactivity by also involving cognitive processes.}, author = {Cliffordson, Christina}, doi = {10.1111/1467-9450.00268}, file = {::}, issn = {0036-5564}, journal = {Scandinavian journal of psychology}, keywords = {Empathy,Factor Analysis,Humans,Social Behavior,Statistical}, month = feb, number = {1}, pages = {49--59}, pmid = {11885760}, title = {{The hierarchical structure of empathy: dimensional organization and relations to social functioning}}, url = {http://onlinelibrary.wiley.com/doi/10.1111/1467-9450.00268/abstract}, volume = {43}, year = {2002} } @article{Larsen1992, abstract = {TOC The structural bases of emotional behavior James R. Averill - Promises and problems with the circumplex model of emotion Randy J. Larsen and Edward Diener - The complexity of intensity Nico H. Frijda, Andrew Ortony, Joep Sonnemans, and Gerald L. Clore - The behavioral ecology and sociality of human faces Alan J. Firdlund - Appraisal as a cause of emotion Brian Parkinson and A.S.R. Manstead - Affective dynamics Robert Mauro - Cross-cultural similarities and differences in emotion and its representation Phillip R. Shaver, Shelley Wu, and Judith C. Schwartz - The process of emotional experience James D. Laird and Charles Bresler - Inhibitory effects of awareness on affective responding Robert F. Bornstein - A functional analysis of the role of mood in affective systems William N. Morris - Differentiating affect, mood, and emotion C. Daniel Batson, Laura L. Shaw, and Kathryn C. Oleson}, author = {Larsen, Randy J and Diener, Edward}, chapter = {2}, editor = {Clark, Margaret S}, isbn = {0803946139}, journal = {Review of Personality and Social Psychology}, number = {13}, pages = {25--59}, publisher = {Sage}, series = {Review of personality and social psychology; No. 
13; 0270-1987}, title = {{Promises and problems with the circumplex model of emotion}}, url = {http://psycnet.apa.org/psycinfo/1992-97396-002}, volume = {13}, year = {1992} } @article{Caridakis2008, abstract = {As input they consider the image sequence of the recorded human behavior. Computer vision and image processing techniques are incorporated in order to detect cues needed for expressivity features extraction. Using multimodalities, the virtual agent mimics the human expressions. The multimodality of the approach lies in the fact that both facial and gestural aspects of the user’s behavior are analyzed and processed. The mimicry consists of perception, interpretation, planning and animation of the expressions shown by the human, resulting not in an exact duplicate rather than an expressive model of the user’s original behavior.}, author = {Caridakis, George and Raouzaiou, Amaryllis and Bevacqua, Elisabetta and Mancini, Maurizio and Karpouzis, Kostas and Malatesta, Lori and Pelachaud, Catherine}, doi = {10.1007/s10579-007-9057-1}, file = {::}, issn = {1574-020X}, journal = {International Language Resources and Evaluation Journal: Special issue on Multimodal Corpora For Modelling Human Multimodal Behavior}, keywords = {facial,gesture,mimicry,multimodal,virtual agent}, month = jan, number = {3-4}, pages = {367--388}, title = {{Virtual agent multimodal mimicry of humans}}, volume = {41}, year = {2008} } @inproceedings{Amini2012, abstract = {In this article, we present HapFACS 1.0, a new software/API for generating static and dynamic three-dimensional facial expressions based on the Facial Action Coding System (FACS). HapFACS pro- vides total control over the FACS Action Units (AUs) activated at all levels of intensity. HapFACS allows generating faces with an individual AU or composition of AUs activated unilaterally or bilat- erally with different intensities. 
The reliable and emotionally valid facial expressions can be generated on infinite number of faces in different ethnicities, genders, and ages using HapFACS to be used in numerous scientific areas including psychology, emotion, FACS learning, clinical, and neuroscience research.}, address = {Vienna, AUSTRIA}, author = {Amini, Reza and Yasavur, U and Lisetti, Christine L}, booktitle = {Proceedings of the ACM 3rd International Symposium on Facial Analysis and Animation (FAA'12)}, file = {::}, publisher = {ACM Press}, title = {{HapFACS 1.0: Software/API for Generating FACS-Based Facial Expressions}}, url = {http://ascl.cis.fiu.edu/uploads/1/3/4/2/13423859/amini-faa-2012.pdf}, year = {2012} } @article{Butow1997, abstract = {BACKGROUND: While the importance of providing individualised communication to cancer patients is now well recognised, little is known about the stability and validity of patients' expressed preferences for information and involvement in decision-making. This study explored the stability and possible predictors of such preferences over time. PATIENTS AND METHODS: Cancer patients seeing two Medical Oncologists in an out-patient clinic at an Australian teaching hospital completed a questionnaire battery before and directly after one consultation, and before their next consultation. Eighty consecutive patients with heterogeneous cancers participated in the study. Preferences for general and specific information, involvement and support were elicited at each assessment. Locus of control and patient familiarity with the clinic were measured before the first consultation. Patient satisfaction with the consultation was assessed directly after the consultation. Demographic and disease data were recorded for each patient. RESULTS: General preferences for information and involvement were relatively stable, at least in the short term; however there was considerable variability in preferences for specific topics of information. 
Patients whose condition had recently worsened were more likely to want progressively less involvement in decision-making. Gender, the doctor seen and religion were also predictive of patient preferences. CONCLUSIONS: Situational factors, such as change in disease status, may alter a patient's preferences for information and involvement. If we wish to match the provision of information and support to the expressed needs of patients, we must ask patients at each consultation what those needs are.}, author = {Butow, P N and Maclean, M and Dunn, S M and Tattersall, M H and Boyer, M J}, institution = {Department of Medicine, University of Sydney, New South Wales, Australia.}, journal = {Annals of oncology official journal of the European Society for Medical Oncology ESMO}, number = {9}, pages = {857--863}, pmid = {9358935}, title = {{The dynamics of change: cancer patients' preferences for information, involvement and support.}}, url = {http://annonc.oxfordjournals.org/cgi/content/abstract/8/9/857}, volume = {8}, year = {1997} } @article{Paiva2005, author = {Paiva, Ana and Dias, Jo\~{a}o and Sobral, Daniel and Aylett, Ruth and Woods, Sarah and Hall, Lynne and Zoll, Carsten}, doi = {10.1080/08839510590910165}, file = {::}, issn = {0883-9514}, journal = {Applied Artificial Intelligence}, month = mar, number = {3-4}, pages = {235--266}, title = {{Learning By Feeling: Evoking Empathy With Synthetic Characters}}, url = {http://www.tandfonline.com/doi/abs/10.1080/08839510590910165}, volume = {19}, year = {2005} } @article{Prendinger2005, abstract = {In this paper, we report on our efforts in developing affective character-based interfaces, i.e., interfaces that recognize and measure affective information of the user and address user affect by employing embodied characters. In particular, we describe the Empathic Companion, an ani- mated interface agent that accompanies the user in the setting of a virtual job interview. 
This inter- face application takes physiological data (skin conductance and electromyography) of a user in realtime, interprets them as emotions, and addresses the user’s affective states in the form of empathic feedback. The Empathic Companion is conceived as an educational agent that supports job seekers preparing for a job interview. We also present results from an exploratory study that aims to evaluate the impact of the Empathic Companion by measuring users’ skin conductance and heart rate. While an overall positive effect of the Empathic Companion could not be shown, the outcome of the experiment suggests that empathic feedback has a positive effect on the interviewee’s stress level while hearing the interviewer question.}, author = {Prendinger, Helmut and Ishizuka, M.}, doi = {10.1080/08839510590910174}, file = {::;::}, journal = {Applied Artificial Intelligence}, keywords = {electromyography,physiological signals,skin conductance}, number = {3-4}, pages = {267--286}, publisher = {Citeseer}, title = {{The Empathic Companion - A Character-based Interface that Addresses Users’ Affective States}}, volume = {19}, year = {2005} } @article{Woods1970, abstract = {The use of augmented transition network grammars for the analysis of natural language sentences is described. Struc- ture-building actions associated with the arcs of the gram- mar network allow for the reordering, restructuring, and copy- ing of constituents necessary to produce deep-structure repre- sentations of the type normally obtained from a transforma- tional analysis, and conditions on the arcs allow for a powerful selectivity which can rule out meaningless analyses and take advantage of semantic information to guide the parsing. The advantages of this model for natural language analysis are discussed in detail and illustrated by examples. 
An implementation of an experimental parsing system for transition network grammars is briefly described.},
author = {Woods, William A.},
doi = {10.1145/355598.362773},
issn = {00010782},
journal = {Communications of the ACM},
number = {10},
pages = {591--606},
publisher = {Association for Computing Machinery},
title = {{Transition Network Grammars for Natural Language Analysis}},
url = {http://portal.acm.org/citation.cfm?doid=355598.362773},
volume = {13},
year = {1970}
}
@inproceedings{Huang2010,
author = {Huang, Lixing and Morency, Louis-Philippe and Gratch, Jonathan},
booktitle = {Proceedings of the 9th International Conference on Autonomous Agents and Multiagent Systems (AAMAS 2010)},
pages = {10--14},
title = {{Parasocial consensus sampling: combining multiple perspectives to learn virtual human behavior}},
url = {http://dl.acm.org/citation.cfm?id=1838371},
year = {2010}
}
@inproceedings{Fabri2007, abstract = {We present our work on emotionally expressive avatars, animated virtual characters that can express emotions via facial expressions. Because these avatars are highly distinctive and easily recognizable, they may be used in a range of applications. In the first part of the paper we present their use in computer mediated communication where two or more people meet in virtual space, each represented by an avatar. Study results suggest that social interaction behavior from the real-world is readily transferred to the virtual world. Empathy is identified as a key component for creating a more enjoyable experience and greater harmony between users. In the second part of the paper we discuss the use of avatars as an assistive, educational and therapeutic technology for people with autism.
Based on the results of a preliminary study, we provide pointers regarding how people with autism may overcome some of the limitations that characterize their condition.}, address = {Beijing, China}, author = {Fabri, Marc and Elzouki, SYA}, booktitle = {Human-Computer Interaction, HCI Intelligent Multimodal Interaction Environments 12th International Conference}, doi = {10.1007/978-3-540-73110-8}, editor = {Jacko, Julie A.}, file = {::}, keywords = {Emotion,autism,avatar,education,empathy,facial messaging,therapeutic intervention,virtual reality}, pages = {275--285}, publisher = {Springer Berlin / Heidelberg}, title = {{Emotionally expressive avatars for chatting, learning and therapeutic intervention}}, url = {http://dl.acm.org/citation.cfm?id=1769621 http://www.springerlink.com/content/7gju6n38605hp3h2/}, year = {2007} } @inproceedings{Dias2005, abstract = {Interactive virtual environments (IVEs) are now seen as an engaging new way by which children learn experimental sciences and other disciplines. These environments are populated by synthetic characters that guide and stimulate the children activities. In order to build such environments, one needs to address the problem of how achieve believable and empathic characters that act autonomously. Inspired by the work of traditional character animators, this paper proposes an architectural model to build autonomous characters where the agent’s reasoning and behaviour is influenced by its emotional state and personality. We performed a small case evaluation in order to determine if the characters evoked empathic reactions in the users with positive results.}, address = {Covilh\~{a}, Portugal}, author = {Dias, J. 
and Paiva, Ana}, booktitle = {EPIA 2005, 12th Portuguese Conference on Artificial Intelligence}, doi = {10.1007/11595014\_13}, editor = {Bento, Carlos and Cardoso, Am\'{\i}lcar and Dias, Ga\"{e}l}, file = {::}, pages = {127--140}, publisher = {Springer Berlin / Heidelberg}, title = {{Feeling and reasoning: A computational model for emotional characters}}, url = {http://www.springerlink.com/index/YQ18H62602413554.pdf}, year = {2005} } @article{Stockwell1994, abstract = {The concept of the Alcohol Dependence Syndrome has been influential in the field of alcohol studies in the 1980s. The Severity of Alcohol Dependence Questionnaire (SADQ) is one of a generation of alcohol problem scales developed to measure degree of dependence rather than presence or absence of 'alcoholism'. This paper describes the development of a form of the SADQ for community samples of drinkers (SADQ-C) and its relationship to a brief scale designed to measure impaired control over drinking. In a sample of 52 problem drinkers, SADQ and SADQ-C correlated almost perfectly (r = 0.98). In a larger sample of 197 attenders at a controlled drinking clinic, Principal Components Analysis revealed one major factor accounting for 71.7\% of the total variance. High internal reliability was indicated with a Cronbach's Alpha of 0.98. Application of this instrument in a random survey of Western Australian households is then described. It was necessary to remove items relating to 'reinstatement of dependence' for this sample. A single major factor was identified by principal components analysis, accounting for 69.1\% of the total variance. In both the clinic and the community samples SADQ-C scores correlated highly with Impairment of Control scores. 
The findings are interpreted as supporting the view that there is a single dimension of alcohol dependence upon which all persons who drink alcohol with any regularity may be located.}, author = {Stockwell, T and Sitharthan, T and McGrath, D and Lang, E}, institution = {National Centre for Research into the Prevention of Drug Abuse, Curtin University of Technology, Perth, Western Australia.}, journal = {Addiction Abingdon England}, keywords = {adolescent,adult,aged,alcohol drinking,alcohol drinking adverse effects,alcohol drinking epidemiology,alcohol drinking psychology,alcoholism,alcoholism classification,alcoholism diagnosis,alcoholism epidemiology,alcoholism psychology,cross sectional studies,female,humans,incidence,internal external control,male,middle aged,psychometrics,reproducibility results,substance withdrawal syndrome,substance withdrawal syndrome classification,substance withdrawal syndrome diagnosis,substance withdrawal syndrome epidemiology,substance withdrawal syndrome psychology,western australia,western australia epidemiology}, number = {2}, pages = {167--174}, pmid = {8173482}, title = {{The measurement of alcohol dependence and impaired control in community samples.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/8173482}, volume = {89}, year = {1994} } @article{Mehrabian1967, author = {Mehrabian, Albert and Ferris, S R}, journal = {Journal of Consulting Psychology}, keywords = {attitude,communication,facial expression,female,humans,verbal behavior}, number = {3}, pages = {248--252}, pmid = {6046577}, title = {{Inference of attitudes from nonverbal communication in two channels.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/6046577}, volume = {31}, year = {1967} } @article{Chavhan2010, author = {Chavhan, Yashpalsing and Dhore, M. L. 
and Yesaware, Pallavi}, doi = {10.5120/431-636}, file = {::}, issn = {09758887}, journal = {International Journal of Computer Applications}, keywords = {emotion recognition,mfcc and,speech emotion,svm}, month = feb, number = {20}, pages = {8--11}, title = {{Speech Emotion Recognition using Support Vector Machine}}, url = {http://www.ijcaonline.org/journal/number20/pxc387636.pdf}, volume = {1}, year = {2010} } @inproceedings{Legaspi2008, author = {Legaspi, Roberto and Kurihara, Satoshi and Fukui, K.I. and Moriyama, Koichi and Numao, Masayuki}, booktitle = {Human system interactions, 2008 conference on}, file = {::}, isbn = {1424415438}, keywords = {empathic computing,interfaces,machine learning,user modeling and user-adaptive}, pages = {209--214}, publisher = {IEEE}, title = {{An empathy learning problem for HSI: To be empathic, self-improving and ambient}}, url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=4581435}, year = {2008} } @article{Wolf2010, abstract = {Computer Vision and Biometrics systems have demonstrated considerable improvement in recognizing and verifying faces in digital images. Still, recognizing faces appearing in unconstrained, natural conditions remains a challenging task. In this paper we present a face-image, pair-matching approach primarily developed and tested on the "Labeled Faces in the Wild" (LFW) benchmark that reflect the challenges of face recognition from unconstrained images. The approach we propose makes the following contributions. (a) We present a family of novel face-image descriptors designed to capture statistics of local patch similarities. (b) We demonstrate how semi-labeled background samples may be used to better evaluate image similarities. To this end we describe a number of novel, effective similarity measures. (c) We show how labeled background samples, when available, may further improve classification performance, by employing a unique pair-matching pipeline. 
We present state-of-the-art results on the LFW pair-matching benchmarks. In addition, we show our system to be well suited for multi-label face classification (recognition) problems. We perform recognition tests on LFW images as well images from the laboratory controlled multiPIE database.},
author = {Wolf, Lior and Hassner, Tal and Taigman, Yaniv},
doi = {10.1109/TPAMI.2010.230},
issn = {1939-3539},
journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
month = dec,
pages = {1--13},
pmid = {21173442},
title = {{Effective Unconstrained Face Recognition by Combining Multiple Descriptors and Learned Background Statistics.}},
volume = {33},
year = {2010}
}
@incollection{Catrambone2004,
author = {Catrambone, Richard and Stasko, John and Xiao, Jun},
booktitle = {From Brows to Trust: Evaluating Embodied Conversational Agents},
chapter = {9},
editor = {Ruttkay, Zs{\'o}fia and Pelachaud, Catherine},
isbn = {1-4020-2730-3},
keywords = {embodied conversational agent,evaluation,research framework,task},
pages = {239--267},
publisher = {Kluwer Academic Publishers},
title = {{ECA as User Interface Paradigm: Experimental Findings within a Framework for Research}},
year = {2004}
}
@article{Fretz1966, author = {Fretz, B.
R.},
journal = {Journal of Counseling Psychology},
pages = {343},
title = {{Postural movement in a counseling dyad}},
volume = {13},
year = {1966}
}
@article{Peter2003,
author = {Sonnby-Borgstr{\"o}m, Marianne and Jonsson, Peter and Svensson, Owe},
journal = {Journal of Nonverbal Behavior},
keywords = {emg,emotional contagion,empathy,facial expressions,facial mimicry,mirror neurons},
number = {1},
pages = {3--23},
title = {{Emotional empathy as related to mimicry reactions at different levels of information processing}},
url = {http://www.springerlink.com/index/P81X69QTH751V836.pdf},
volume = {27},
year = {2003}
}
@article{Cai2006, abstract = {Empathic computing is an emergent paradigm that enables a system to understand human states and feelings and to share this intimate information. The new paradigm is made possible by the convergence of affordable sensors, embedded processors and wireless ad-hoc networks. The power law for multi-resolution channels and mobile-stationary sensor webs is introduced to resolve the information avalanche problems. As empathic computing is sensor-rich computing, particular models such as semantic differential expressions and inverse physics are discussed. A case study of a wearable sensor network for detection of a falling event is presented. It is found that the location of the wearable sensor is sensitive to the results. From the machine learning algorithm, the accuracy reaches up to 90\% from 21 simulated trials. Empathic computing is not limited to healthcare.
It can also be applied to solve other everyday-life problems such as management of emails and stress.}, author = {Cai, Yang}, doi = {10.1007/11825890\_3}, file = {::}, journal = {Ambient Intelligence in Everyday Life, Lecture Notes in Computer Science}, pages = {67--85}, publisher = {Springer}, title = {{Empathic computing}}, url = {http://www.springerlink.com/index/l482m128476w5043.pdf}, volume = {3864/2006}, year = {2006} } @book{Greene2003, abstract = {Providing a thorough review and synthesis of work on communication skills and skill enhancement, this "Handbook" serves as a comprehensive and contemporary survey of theory and research on social interaction skills. Editors John O. Greene and Brant R. Burleson have brought together preeminent researchers and writers to contribute to this volume, establishing a foundation on which future study and research will build. The handbook chapters are organized into five major units: general theoretical and methodological issues (models of skill acquisition, methods of skill assessment); fundamental interaction skills (both transfunctional and transcontextual); function-focused skills (informing, persuading, supporting); skills used in management of diverse personal relationships (friendships, romances, marriages); and skills used in varied venues of public and professional life (managing leading, teaching). Distinctive features of this handbook include: broad, comprehensive treatment of work on social interaction skills and skill acquisition; up-to-date reviews of research in each area; and emphasis on empirically supported strategies for developing and enhancing specific skills. 
Researchers in communication studies, psychology, family studies, business management, and related areas will find this volume a comprehensive, authoritative source on communications skills and their enhancement, and it will be essential reading for scholars and students across the spectrum of disciplines studying social interaction.}, author = {Greene, John O and Burleson, Brant Raney}, booktitle = {Communication}, editor = {Greene, John O and Burleson, Brant R}, isbn = {0805834176}, pages = {1051}, publisher = {Lawrence Erlbaum Associates, Inc., Publishers}, title = {{Handbook of Communication and Social Interaction Skills}}, year = {2003} } @article{Pardas2002, abstract = {The video analysis system described in this paper aims at facial expression recognition consistent with the MPEG4 standardized parameters for facial animation, FAP. For this reason, two levels of analysis are necessary: low level analysis to extract the MPEG4 compliant parameters and high level analysis to estimate the expression of the sequence using these low level parameters. The low level analysis is based on an improved active contour algorithm that uses high level information based on Principal Component Analysis to locate the most significant contours of the face (eyebrows and mouth), and on motion estimation to track them. 
The high level analysis takes as input the FAP produced by the low level analysis tool and, by means of a Hidden Markov Model classifier, detects the expression of the sequence.}, author = {Pard\`{a}s, Montse and Bonafonte, Antonio}, doi = {10.1016/S0923-5965(02)00078-4}, file = {::}, issn = {09235965}, journal = {Signal Processing: Image Communication}, month = oct, number = {9}, pages = {675--688}, title = {{Facial animation parameters extraction and expression recognition using Hidden Markov Models}}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0923596502000784}, volume = {17}, year = {2002} }
@inproceedings{Broek2005,
address = {Utrecht, The Netherlands},
author = {van den Broek, E. L.},
booktitle = {Proceedings of the AAMAS-05 Agent-Based Systems for Human Learning (ABSHL) Workshop},
editor = {Johnson, L. and Richards, D. and Sklar, E. and Wilensky, U.},
pages = {59--67},
title = {{Empathic agent technology}},
year = {2005}
}
@article{Pentland2000, author = {Pentland, A.}, journal = {Communications of the ACM}, number = {3}, pages = {35--44}, title = {{Looking at people}}, volume = {43}, year = {2000} }
@phdthesis{Bickmore2003,
author = {Bickmore, Timothy Wallace},
school = {Massachusetts Institute of Technology},
title = {{Relational Agents: Effecting Change through Human-Computer Relationships}},
type = {Doctor of Philosophy},
year = {2003}
}
@inproceedings{Shapiro2011,
author = {Shapiro, Ari},
booktitle = {Motion in Games (MIG'11)},
keywords = {animation,character,graphics,system},
pages = {98--109},
series = {Lecture Notes in Computer Science},
title = {{Building a character animation system}},
url = {http://www.springerlink.com/index/L24P125448583571.pdf},
volume = {7060},
year = {2011}
}
@book{Kipp2005, author = {Kipp, Michael}, isbn = {1581122551, 9781581122558}, publisher = {Universal-Publishers}, title = {{Gesture Generation By Imitation: From Human Behavior To Computer Character Animation}}, year = {2005} }
@techreport{Gratch2010, author = {Gratch,
Jonathan and Kang, Sin-hwa and Wang, Ning},
institution = {University of Southern California},
pages = {1--22},
title = {{Using social agents to explore theories of rapport and emotional resonance}},
year = {2010}
}
@article{Gruen1986, author = {Gruen, Rand J. and Mendelsohn, Gerald}, doi = {10.1037/0022-3514.51.3.609}, issn = {1939-1315}, journal = {Journal of Personality and Social Psychology}, number = {3}, pages = {609--614}, title = {{Emotional responses to affective displays in others: The distinction between empathy and sympathy.}}, volume = {51}, year = {1986} }
@article{Picard2001, author = {Picard, Rosalind W and Vyzas, E. and Healey, J.}, doi = {10.1109/34.954607}, issn = {01628828}, journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, number = {10}, pages = {1175--1191}, title = {{Toward machine emotional intelligence: analysis of affective physiological state}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=954607}, volume = {23}, year = {2001} }
@article{Gupta2012,
author = {Gupta, Prabodh and Jhala, Darshana and Jhala, Nirag},
doi = {10.1309/AJCPLAE62CRYYXNW},
issn = {1943-7722},
journal = {American Journal of Clinical Pathology},
month = jan,
number = {1},
pages = {160},
pmid = {22180490},
title = {{Book review.}},
url = {http://www.ncbi.nlm.nih.gov/pubmed/22180490},
volume = {137},
year = {2012}
}
@article{Hess2001, author = {Hess, Ursula and Blairy, Sylvie}, file = {::}, journal = {International Journal of Psychophysiology}, keywords = {emotion recognition,emotional contagion,facial mimicry}, pages = {129--141}, title = {{Facial mimicry and emotional contagion to dynamic emotional facial expressions and their influence on decoding accuracy}}, volume = {40}, year = {2001} }
@article{Friesen1983, author = {Friesen, Wallace V and Ekman, Paul}, journal = {Unpublished manuscript, University of California at San Francisco}, publisher = {University of
California}, title = {{EMFACS-7: Emotional Facial Action Coding System}}, url = {http://scholar.google.com/scholar?hl=en\&btnG=Search\&q=intitle:EMFACS-7:+Emotional+Facial+Action+Coding+System\#0}, year = {1983} } @inproceedings{McQuiggan2008, abstract = {Humans continuously assess one another’s situational context, modify their own affective state, and then respond based on these outcomes through empathetic expression. Virtual agents should be capable of similarly empathizing with users in interactive environments. A key challenge posed by empathetic reasoning in virtual agents is determining whether to respond with parallel or reactive empathy. Parallel empathy refers to mere replication of another’s affective state, whereas reactive empathy exhibits greater cognitive awareness and may lead to incongruent emotional responses (i.e., emotions different from the recipient’s and perhaps intended to alter negative affect). This paper proposes a unified inductive framework for modeling parallel and reactive empathy. Empathy models are used to drive runtime situation-appropriate empathetic behaviors by selecting suitable parallel or reactive empathetic expressions.}, address = {Estoril, Portugal}, author = {McQuiggan, Scott W and Robison, Jennifer and Phillips, Robert}, booktitle = {Proceedings of 7th Int. Conf. on Autonomous Agents and Multiagent Systems (AAMAS 2008)}, editor = {{Padgham, Parkes}, M\"{u}ller and Parsons}, file = {::}, keywords = {Affective Reasoning,Empathy,Human-Computer Interaction,Intelligent Virtual Agents,Machine Learning}, number = {Aamas}, pages = {167--174}, publisher = {International Foundation for Autonomous Agents and Multiagent Systems (www.ifaamas.org)}, title = {{Modeling parallel and reactive empathy in virtual agents: An inductive approach}}, year = {2008} } @article{Blair2003, abstract = {Human emotional expressions serve a crucial communicatory role allowing the rapid transmission of valence information from one individual to another. 
This paper will review the literature on the neural mechanisms necessary for this communication: both the mechanisms involved in the production of emotional expressions and those involved in the interpretation of the emotional expressions of others. Finally, reference to the neuro-psychiatric disorders of autism, psychopathy and acquired sociopathy will be made. In these conditions, the appropriate processing of emotional expressions is impaired. In autism, it is argued that the basic response to emotional expressions remains intact but that there is impaired ability to represent the referent of the individual displaying the emotion. In psychopathy, the response to fearful and sad expressions is attenuated and this interferes with socialization resulting in an individual who fails to learn to avoid actions that result in harm to others. In acquired sociopathy, the response to angry expressions in particular is attenuated resulting in reduced regulation of social behaviour.}, author = {Blair, R J R}, issn = {09628436}, journal = {Philosophical Transactions of the Royal Society of London Series BBiological Sciences}, keywords = {amygdala,autism,autistic children,bilateral amygdala damage,communication,emotional faces,facial expressions,fusiform face area,human brain,prefrontal cortex,psychopath,selective impairment,smile production,social information,temporal visual cortex}, number = {1431}, pages = {561--572}, title = {{Facial expressions, their communicatory functions and neuro-cognitive substrates}}, volume = {358}, year = {2003} } @article{Shields2005, abstract = {OBJECTIVES: To develop a reliable and valid computer coded measure to assess emotional expression from transcripts of physician-patient interactions. METHODS: Physician encounters with two standardized patients (SPs) were audiotaped. Fifty patients from each physician (n = 100 primary care physicians) completed surveys that assessed patients' perceptions of their relationships with physicians. 
Audio-recordings of 193 patient-physician encounters were transcribed and computer-coded to derive a percent emotion words, and research assistants completed the Measure of Patient-Centered Communication (MPCC). RESULTS: After adjustment for potential confounders, regression analyses revealed physicians' use of emotion words and the MPCC contribute independently to patients' and SPs' perceptions of their relationship with physicians. CONCLUSIONS: The computerized coding of emotion words shows promise as a reliable, valid, and simple method to code transcript data of physician-patient interactions. This method may be expanded to examine other aspects of physician language and does not require coder training.}, author = {Shields, Cleveland G and Epstein, Ronald M and Franks, Peter and Fiscella, Kevin and Duberstein, Paul and McDaniel, Susan H and Meldrum, Sean}, institution = {Department of Family Medicine, University of Rochester Medical Center, Rochester Center to Improve Communication in Health Care, 1381 South Avenue, Rochester, NY 14620-2830, USA. 
Cleveland\_Shields@URMC.Rochester.edu}, journal = {Patient Education and Counseling}, number = {2}, pages = {232--238}, pmid = {15911198}, publisher = {Elsevier}, title = {{Emotion language in primary care encounters: reliability and validity of an emotion word count coding system.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15911198}, volume = {57}, year = {2005} } @article{Lafrance1976, author = {Lafrance, Marianne and Broadbent, M.}, doi = {10.1177/105960117600100307}, file = {::}, isbn = {1059601176}, issn = {1059-6011}, journal = {Group \& Organization Management}, month = sep, number = {3}, pages = {328--333}, title = {{Group Rapport: Posture Sharing as a Nonverbal Indicator}}, volume = {1}, year = {1976} } @article{Cliffordson2002, abstract = {The purpose of the present study was to examine the structure of empathy using a hierarchical approach, and to compare the dimensions of empathy with measures of social functioning, in order to contribute to the understanding of the nature of empathy. The dimensionality of the Interpersonal Reactivity Index, which comprises four subscales (empathic concern, perspective taking, fantasy and personal distress) was examined using confirmatory factor analysis. Relations with the Social Skills Inventory were also investigated. A sample of 127 applicants for places on nursing and social work undergraduate programs participated in the study. The study findings indicate that empathy is hierarchically organized, with one general dimension at the apex. The general factor is identical to empathic concern and this dimension overlaps to a great extent with perspective taking and fantasy. 
The findings also indicate that the general dimension constitutes an integrated entirety, with its main emphasis on emotional reactivity by also involving cognitive processes.}, author = {Cliffordson, Christina}, doi = {10.1111/1467-9450.00268}, file = {::}, issn = {0036-5564}, journal = {Scandinavian journal of psychology}, keywords = {Empathy,Factor Analysis,Humans,Social Behavior,Statistical}, month = feb, number = {1}, pages = {49--59}, pmid = {11885760}, title = {{The hierarchical structure of empathy: dimensional organization and relations to social functioning}}, url = {http://onlinelibrary.wiley.com/doi/10.1111/1467-9450.00268/abstract}, volume = {43}, year = {2002} } @inproceedings{Bickmore2009, abstract = {Ninety million Americans have inadequate health literacy, resulting in a reduced ability to read and follow directions in the healthcare environment. We describe an animated, empathic virtual nurse interface for design rationale, and two Boston University School of Medicine Boston Medical Center brian.jack@bmc.org educating and counseling hospital patients with inadequate health literacy in their hospital beds at the time of discharge. The development methodology, iterations of user testing are described. Results indicate that hospital patients with low health literacy found the system easy to use, reported high levels of satisfaction, and most said they preferred receiving the discharge information from the agent over their doctor or nurse. Patients also expressed appreciation for the time and attention provided by the virtual nurse, and felt that it provided an additional authoritative source for their medical information.}, address = {New York}, author = {Bickmore, Timothy W. 
and Pfeifer, Laura M and Jack, Brian W}, booktitle = {Proceedings of the 27th international ACM conference on Human factors in computing systems (CHI'09)}, file = {::}, isbn = {9781605582467}, keywords = {Access,Conversational Agent,Embodied,Health Literacy,Hospital Discharge,Patient Education,Patient Safety,Relational Agent,Universal}, pages = {1265--1274}, publisher = {ACM}, title = {{Taking the Time to Care : Empowering Low Health Literacy Hospital Patients with Virtual Nurse Agents}}, year = {2009} } @inproceedings{Pasquariello2001, author = {Pasquariello, Stefano and Pelachaud, Catherine}, booktitle = {Proceedings 6th Online World Conference on Soft Computing in Industrial Appications Session on Soft Computing for Intelligent 3D Agents}, title = {{Greta: A Simple Facial Animation Engine}}, year = {2001} } @article{Prendinger2006, author = {Prendinger, H and Becker, Christian}, file = {::}, journal = {International Journal of Humanoid}, keywords = {affective behavior,empathy,evaluation,life-like characters,physiological user information}, number = {3}, pages = {371--391}, title = {{A STUDY IN USERS'S;PHYSIOLOGICAL RESPONSE TO AN EMPATHIC INTERFACE AGENT}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.77.9379\&rep=rep1\&type=pdf}, volume = {3}, year = {2006} } @inproceedings{Cairco2009, abstract = {Avari is a virtual receptionist for the Computer Science department at The University of North Carolina at Charlotte. Her components include background subtraction to detect a person’s presence, speech recognition, audio and visual devices to communicate with passersby. Deployed in a public setting, we investigate the reactions and interactions of passersby with Avari. 
We describe the design and architecture of the virtual human and discuss the effectiveness of a publicly deployed virtual human.}, address = {Clemson, SC, USA.}, author = {Cairco, Lauren and Hill, Rock and Wilson, Dale-marie and Fowler, Vicky and Leblanc, Morris}, booktitle = {48th ACM Southeast Conference (ACMSE'09)}, file = {::}, isbn = {9781605584218}, keywords = {human-centered,human-computer interaction,virtual humans}, pages = {1--6}, title = {{AVARI : Animated Virtual Agent Retrieving Information}}, year = {2009} } @article{Lafrance1976, author = {Lafrance, Marianne and Broadbent, M.}, doi = {10.1177/105960117600100307}, file = {::}, isbn = {1059601176}, issn = {1059-6011}, journal = {Group \& Organization Management}, month = sep, number = {3}, pages = {328--333}, title = {{Group Rapport: Posture Sharing as a Nonverbal Indicator}}, volume = {1}, year = {1976} } @article{DiClemente2001, abstract = {OBJECTIVE: To offer a taxonomy of types of feedback and describe potential mechanisms of action particularly in the area of addictive behaviors. METHOD: Reviewed the literature to examine support for types-Generic, Targeted, and Personalized-and for mechanisms of feedback. RESULTS: Although it is not clear how it works, feedback is thought to offer important information, to create a sense of caring and helping relationship, to reach more directly decisional considerations, to increase engagement in the materials, to increase motivation, or to provide social comparison and norms. CONCLUSIONS: Avenues for future research in search of the most effective manner of using feedback to promote health behavior change are discussed.}, author = {DiClemente, C C and Marinilli, A S and Singh, M and Bellino, L E}, institution = {Psychology Department, University of Maryland, Baltimore County, Baltimore 21250, USA. 
diclemen@umbc.edu}, journal = {American Journal of Health Behavior}, keywords = {addictive,addictive prevention \& control,addictive psychology,behavior,classification,feedback,health behavior,health education,health education classification,humans,mass screening,models,psychological,risk taking}, number = {3}, pages = {217--227}, pmid = {11322620}, publisher = {PNG Publications}, title = {{The role of feedback in the process of health behavior change.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/11322620}, volume = {25}, year = {2001} } @inproceedings{DeCarlo2002, abstract = {People highlight the intended interpretation of their utterances within a larger discourse by a diverse set of nonverbal signals. These signals represent a key chal- lenge for animated conversational agents because they are pervasive, variable, and need to be coordinated ju- diciously in an effective contribution to conversation. In this paper, we describe a freely-available cross-platform real-time facial animation system, RUTH, that animates such high-level signals in synchrony with speech and lip movements. RUTH adopts an open, layered archi- tecture in which fine-grained features of the animation can be derived by rule from inferred linguistic structure, allowing us to use RUTH, in conjunction with annota- tion of observed discourse, to investigate the meaningful high-level elements of conversational facial movement for American English speakers.}, author = {DeCarlo, D. and Revilla, C. and Stone, Matthew and Venditti, J.J.}, booktitle = {Proceedings of Computer Animation 2002 (CA 2002)}, doi = {10.1109/CA.2002.1017501}, isbn = {0-7695-1594-0}, number = {Ca}, pages = {11--16}, publisher = {IEEE Comput. 
Soc}, title = {{Making discourse visible: coding and animating conversational facial displays}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1017501}, volume = {2002}, year = {2002} } @article{Meltzoff1977, abstract = {Infants between 12 and 21 days of age can imitate both facial and manual gestures; this behavior cannot be explained in terms of either conditioning or innate releasing mechanisms. Such imitation implies that human neonates can equate their own unseen behaviors with gestures they see others perform.}, author = {Meltzoff, AN}, doi = {10.1126/science.198.4312.75}, file = {::}, issn = {0036-8075}, journal = {Science}, month = oct, number = {4312}, pages = {75--8}, pmid = {17741897}, title = {{Imitation of facial and manual gestures by human neonates}}, volume = {198}, year = {1977} } @book{Goldstein1985, author = {Goldstein, Arnold P. and Michaels, Gerald Y.}, edition = {1}, isbn = {089859538X}, pages = {304}, publisher = {Hillsdale, N.J. : L. Erlbaum Associates}, title = {{Empathy: development, training, and consequences}}, year = {1985} } @article{Straalen2009, author = {Straalen, Bart Van and Heylen, Dirk and Theune, Mari\"{e}t}, file = {::}, journal = {Agents for Games and}, keywords = {bad news con-,embodied conversational agents,empathy,social agents,tutoring,versations}, pages = {95--106}, title = {{Enhancing Embodied Conversational Agents with Social and Emotional Capabilities}}, url = {http://www.springerlink.com/index/3612181747K5L570.pdf}, year = {2009} } @article{Mehrabian1972, author = {Mehrabian, Albert and Epstein, N}, file = {::}, journal = {Journal of Personality}, number = {4}, pages = {525--543}, pmid = {4642390}, publisher = {Wiley Online Library}, title = {{A measure of emotional empathy.}}, volume = {40}, year = {1972} } @article{Shamay-Tsoory2011, abstract = {Human empathy relies on the ability to share emotions as well as the ability to understand the other's thoughts, desires, and feelings. 
Recent evidence points to 2 separate systems for empathy: an emotional system that supports our ability to empathize emotionally and a cognitive system that involves cognitive understanding of the other's perspective. A neural network that includes the inferior frontal gyrus and the inferior parietal lobule is necessary for emotion recognition and emotional contagion. Although the emotional and cognitive systems appear to work independently, every empathic response may still evoke both components to some extent, depending on the social context.}, annote = {Difference between cognitive empathy and emotional empathy is explained in this paper. Also they talk about the active brain parts in each empathy type.}, author = {Shamay-Tsoory, Simone G}, doi = {10.1177/1073858410379268}, file = {::}, issn = {1089-4098}, journal = {The Neuroscientist : a review journal bringing neurobiology, neurology and psychiatry}, keywords = {Brain,Brain: physiology,Empathy,Empathy: physiology,Humans,Neural Pathways,Neural Pathways: physiology}, month = feb, number = {1}, pages = {18--24}, pmid = {21071616}, title = {{The neural bases for empathy.}}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21071616}, volume = {17}, year = {2011} } @article{Varni2009, author = {Varni, Giovanna and Camurri, Antonio and Coletta, Paolo and Volpe, Gualtiero}, doi = {10.1109/CSE.2009.230}, file = {::}, isbn = {978-1-4244-5334-4}, journal = {2009 International Conference on Computational Science and Engineering}, keywords = {Social signals, music, synchronisation}, pages = {843--848}, publisher = {Ieee}, title = {{Toward a Real-Time Automated Measure of Empathy and Dominance}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5283210}, year = {2009} } @article{Gratch2006, author = {Gratch, Jonathan and Okhmatovskaia, Anna and Lamothe, Francois}, file = {::}, journal = {Intelligent Virtual}, title = {{Virtual rapport}}, url = {http://www.springerlink.com/index/k720537752657m81.pdf}, year = 
Interaction Design and Usability}, doi = {10.1007/978-3-540-73105-4\_78}, file = {::}, isbn = {978-3-540-73104-7}, keywords = {Avatar - Attitude - Empathy - Presence - Para-Soci}, pages = {711--720}, publisher = {Springer Berlin / Heidelberg}, title = {{Influence of avatar creation on attitude, empathy, presence, and para-social interaction}}, url = {http://www.springerlink.com/index/9518116J51670433.pdf}, year = {2007} } @inproceedings{Jaques2004, abstract = {In this paper we describe the use of mental states, more specifically the BDI approach, to implement the process of affective diagnosis in an educational environment. We use the OCC model, which is based on the cognitive theory of emotions and is possible to be implemented computationally, in order to infer the learner’s emotions from his actions in the system interface. The BDI approach is very adequate since the emotions have a dynamic nature. Besides, in our work we profit from the reasoning capacity of the BDI approach in order to infer the student’s appraisal, which allow us to deduce student’s emotions.}, address = {Puebla}, author = {Jaques, Patricia Augustin and Viccari, Rosa M}, booktitle = {IBERO-AMERICAN CONFERENCE ON ARTIFICIAL INTELLIGENCE (IBERAMIA)}, pages = {901--911}, publisher = {Springer-Verlag}, title = {{A BDI Approach to Infer Student's Emotions}}, year = {2004} } @incollection{Cooper2000, abstract = {This paper considers how research into empathy in teaching and learning can inform the research into intelligent systems and intelligent agents embedded in educational applications. It also relates this research to some analysis of classroom practice completed as part of the EU funded NIMIS project. The project is developing three applications, one of which aims to support writing development with young children aged 5-6 years based on a cartoon format. 
The NIMIS classroom as a whole is designed to enhance and augment existing classroom practices and to foster collaboration by non-intrusive hardware and intuitive hardware and software interfaces. To this end it seeks to enhance both human and electronic communication in the classroom. Empathy is central to ensuring the quality of human communication and personal development. This paper suggests that intelligent systems that can consider more carefully the processes and feelings involved in human interactions in teaching and learning, may promote higher quality support for students in classrooms.}, author = {Cooper, Bridget and Brna, Paul and Martins, Alex}, booktitle = {Affective Interactions Towards a New Generation of Computer Interfaces}, doi = {10.1007/10720296\_3}, editor = {Paiva, Ana}, isbn = {978-3-540-41520-6}, pages = {21--34}, publisher = {Springer Berlin / Heidelberg}, title = {{Effective affective in intelligent systems–building on evidence of empathy in teaching and learning}}, url = {http://www.springerlink.com/index/j8v0l230t3503367.pdf}, volume = {1814/2000}, year = {2000} } @phdthesis{Becker-Asano2008, author = {Becker-Asano, Christian}, file = {::}, keywords = {Emotion,Empathy,PhD Thesis,Secondary Emotions,primary Emotions}, mendeley-tags = {PhD Thesis}, pages = {186}, publisher = {IOS Press}, school = {University of Bielefeld}, title = {{WASABI: Affect simulation for agents with believable interactivity}}, type = {PhD Dissertation, IOS Press (DISKI 319)}, url = {http://books.google.com/books?hl=en\&lr=\&id=8ABvlwHBCQIC\&oi=fnd\&pg=PA1\&dq=WASABI+:+Affect+Simulation+for+Agents+with+Believable+Interactivity\&ots=m6MhCZ6IzD\&sig=IcDYrCYofbGlJ8E1szs\_wltd18k}, volume = {319}, year = {2008} } @article{Sloan2009, author = {Sloan, Robin James Stuart and Cook, Malcolm and Robinson, Brian}, doi = {10.1109/VIZ.2009.28}, file = {::}, isbn = {978-0-7695-3734-4}, journal = {2009 Second International Conference in Visualisation}, keywords = {- character 
animation,believability,emotional expression,facial animation,inform artistic practice,of and between emotional,perception,produce and test animations,the primary goal of,the project is to}, month = jul, pages = {61--66}, publisher = {Ieee}, title = {{Considerations for Believable Emotional Facial Expression Animation}}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5230711}, year = {2009} } @article{Peter2003, author = {Sonnby-borgstr\"{o}m, Marianne and Jonsson, Peter and Svensson, Owe}, file = {::}, journal = {Journal of Nonverbal}, keywords = {emg,emotional contagion,empathy,facial expressions,facial mim-,icry,mirror neurons}, number = {1}, pages = {3--23}, title = {{Emotional empathy as related to mimicry reactions at different levels of information processing}}, url = {http://www.springerlink.com/index/P81X69QTH751V836.pdf}, volume = {27}, year = {2003} } @article{Southard1918, author = {Southard, E E}, journal = {The Journal of Abnormal Psychology}, number = {4}, pages = {199}, publisher = {American Psychological Association}, title = {{The empathic index in the diagnosis of mental diseases.}}, volume = {13}, year = {1918} } @article{Wallraven2004, address = {New York, New York, USA}, author = {Wallraven, Christian and Cunningham, Douglas W. and Breidt, Martin and B\"{u}lthoff, Heinrich H.}, doi = {10.1145/1012551.1012603}, isbn = {1581139144}, journal = {Proceedings of the 1st Symposium on Applied perception in graphics and visualization - APGV '04}, pages = {181}, publisher = {ACM Press}, title = {{View dependence of complex versus simple facial motions}}, url = {http://portal.acm.org/citation.cfm?doid=1012551.1012603}, year = {2004} } @article{Maurer1983, author = {Maurer, R.E. 
and Tindall, J.H.}, file = {::}, journal = {Journal of Counseling Psychology}, number = {2}, pages = {158}, publisher = {American Psychological Association}, title = {{Effect of postural congruence on client's perception of counselor empathy.}}, volume = {30}, year = {1983} } @book{Dimeff1999, abstract = {(from the cover) This manual presents a pragmatic and clinically proven approach to the prevention and treatment of undergraduate alcohol abuse. The Brief Alcohol Screening and Intervention for College Students (BASICS) model is a nonconfrontational, harm reduction approach that helps students reduce their alcohol consumption and decrease the behavioral and health risks associated with heavy drinking. Including reproducible handouts and assessment forms, the book takes readers step-by-step through conducting BASICS assessment and feedback sessions. Special topics covered include the use of Diagnostic and Statistical Manual of Mental Disorders-IV (DSM-IV) criteria to evaluate alcohol abuse, ways to counter defensiveness about drinking and how to help students who continue to drink in a hazardous fashion. (PsycINFO Database Record (c) 2010 APA, all rights reserved) (cover)}, author = {Dimeff, Linda A and Baer, John S and Kivlahan, Daniel R and Marlatt, G Alan}, booktitle = {The Journal of Psychiatry Law}, isbn = {1572303921}, pages = {1929--1945}, publisher = {Guilford Press}, title = {{Brief alcohol screening and intervention for college students (BASICS): A harm reduction approach}}, url = {http://search.ebscohost.com/login.aspx?direct=true\&db=psyh\&AN=1999-02125-000\&lang=fr\&site=ehost-live}, volume = {30}, year = {1999} } @article{Johnstone2000, abstract = {This chapter provides a comprehensive overview of the current state of the literature on the vocal communication of emotion. It highlights some of the many evolutionary, physiological, cognitive, social, and cultural factors which shape the way humans express and perceive emotions in speech. 
With such a large and seemingly disparate number of determinants, it might seem as if the topic were too messy to expect any invariance in empirical findings. Perhaps surprisingly however, the summary of research into the production and perception of emotional speech has revealed considerable consistency. On the production side, the evidence is starting to accumulate that humans consistently modify their speech in specific ways to express different emotions. The major acoustic parameters are described and the relevant literature reviewed. Results of perception studies indicate that emotions expressed in speech are to a large extent successfully detected by a variety of populations, on the basis of an experimentally identifiable set of acoustic parameters. The differences in recognition accuracy between different emotions are discussed. The consistency in the results is no doubt partly because most research to date has been limited to settings in which many of the factors described above have been eliminated or controlled for. In addition to further refinement of analysis techniques and a focus on real, as well as acted, emotional speech, there is clearly a need for studies that better quantify the relative contribution of culture, language and social strategy to the vocal comunication of emotion. 
To address these issues in a manner that allows results from different studies to be integrated and compared, a coordinated, interdisciplinary approach to research on the vocal communication of emotion will be required.}, author = {Johnstone, Tom and Scherer, Klaus R.}, chapter = {14}, editor = {Lewis, M and Haviland-Jones, J M}, journal = {Handbook of emotions}, number = {1-2}, pages = {220--235}, publisher = {The Guilford Press}, title = {{Vocal communication of emotion}}, url = {http://centaur.reading.ac.uk/4362/}, volume = {2}, year = {2000} } @article{Hingson2005, author = {Hingson, Ralph and Heeren, Timothy and Winter, Michael and Wechsler, Henry}, journal = {Journal of Studies on Alcohol and Drugs}, pages = {12--20}, title = {{MAGNITUDE OF ALCOHOL-RELATED MORTALITY AND MORBIDITY AMONG U.S. COLLEGE STUDENTS AGES 18–24: Changes from 1999 to 2005}}, url = {http://www.jsad.com/}, volume = {16}, year = {2009} } @incollection{Catucci2006, abstract = {Empathy is a distributed environment for the generation of emotions and other related affective phenomena like moods and temperaments. Empathy has been conceived as an object-oriented reusable framework entirely written in Java and realized for the purpose of studying the direct influences of emotions on behaviors and on decision-making processes of autonomous agents, interacting in complex or real environments. It allows for the realization of custom emotional agents, usable in several different domains, from the educational applications (e.g. entertainment, video games, intelligent tutoring systems.) to control systems in autonomous robots.}, author = {Catucci, Graziano and Abbattista, Fabio and Gadaleta, R. and Guaccero, Domenico and Semeraro, Giovanni}, booktitle = {Applied Soft Computing Technologies: The Challenge of Complexity}, doi = {10.1007/3