<?xml version="1.0" encoding="UTF-8"?>
<record
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.loc.gov/MARC21/slim http://www.loc.gov/standards/marcxml/schema/MARC21slim.xsd"
    xmlns="http://www.loc.gov/MARC21/slim">

  <leader>02922nam a22001697a 4500</leader>
  <controlfield tag="003">NUST</controlfield>
  <datafield tag="082" ind1=" " ind2=" ">
    <subfield code="a">629.8</subfield>
  </datafield>
  <datafield tag="100" ind1=" " ind2=" ">
    <subfield code="a">Jaffar, Anum</subfield>
    <subfield code="9">119745</subfield>
  </datafield>
  <datafield tag="245" ind1=" " ind2=" ">
    <subfield code="a">Human Robot Interaction- Personality Prediction of a Human Using Humanoid Robot /</subfield>
    <subfield code="c">Anum Jaffar</subfield>
  </datafield>
  <datafield tag="264" ind1=" " ind2=" ">
    <subfield code="a">Islamabad : </subfield>
    <subfield code="b">SMME- NUST; </subfield>
    <subfield code="c">2023. </subfield>
  </datafield>
  <datafield tag="300" ind1=" " ind2=" ">
    <subfield code="a">67p. ;</subfield>
    <subfield code="b">Soft Copy</subfield>
    <subfield code="c">30cm.</subfield>
  </datafield>
  <datafield tag="520" ind1=" " ind2=" ">
    <subfield code="a">This study presents an innovative approach to predicting personality traits by utilizing
Human-Robot Interaction (HRI). The research focuses on predicting personality traits
based on the Big Five model. The study incorporates nonverbal cues, such as facial
expressions and body language, along with verbal interaction, a 44-item Big Five Inventory
(BFI) questionnaire, and expert analysis. To facilitate the interactive session and
personality prediction, a humanoid robot named NAO was employed. The robot interacted
verbally with the participants, and during these interactions, it captured nonverbal cues,
specifically facial expressions (happy, sad, fear, angry, and surprised), head pose (looking
forward, looking up, looking down, looking left, and looking right), and body poses
(standing, akimbo, close arms, open arms, and thinking). For facial expression analysis, the
researchers employed the Face Emotion Recognition Plus (FER+) dataset, which was
trained using a Convolutional Neural Network (CNN). This module enabled the recognition of
different facial expressions associated with emotions. The head poses module determined
head angles using Euler angles, while the body pose was estimated by calculating the
shoulder and elbow joint angles using the law of cosine. The proposed system was tested
on 16 participants aged between 21-30 years to assess traits i.e., extraversion, neuroticism,
agreeableness, openness, and conscientiousness by integrating questionnaire response,
human-robot interaction, and expert analysis. Results of the study indicate a significant
association between the personality predictions made by the robot and the assessments
conducted by psychologists. In all 16 cases, the predicted personalities were consistent with
the expert opinions. This suggests that the extensive utilization of nonverbal cues,
combined with verbal interaction, holds potential for personality prediction using the Big
Five model. Overall, this study demonstrates an innovative approach to personality
prediction, leveraging Human-Robot Interaction and integrating multiple data sources. By
incorporating nonverbal cues alongside verbal interaction and expert analysis, the proposed
architecture shows promise in predicting personality traits based on the Big Five model.</subfield>
  </datafield>
  <datafield tag="650" ind1=" " ind2=" ">
    <subfield code="a">MS Robotics and Intelligent Machine Engineering</subfield>
    <subfield code="9">119486</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="a">Supervisor: Dr. Sara Ali</subfield>
    <subfield code="9">119733</subfield>
  </datafield>
  <datafield tag="856" ind1=" " ind2=" ">
    <subfield code="u">http://10.250.8.41:8080/xmlui/handle/123456789/33949</subfield>
  </datafield>
  <datafield tag="942" ind1=" " ind2=" ">
    <subfield code="2">ddc</subfield>
    <subfield code="c">THE</subfield>
  </datafield>
  <datafield tag="999" ind1=" " ind2=" ">
    <subfield code="c">607364</subfield>
    <subfield code="d">607364</subfield>
  </datafield>
  <datafield tag="952" ind1=" " ind2=" ">
    <subfield code="0">0</subfield>
    <subfield code="1">0</subfield>
    <subfield code="4">0</subfield>
    <subfield code="7">0</subfield>
    <subfield code="a">SMME</subfield>
    <subfield code="b">SMME</subfield>
    <subfield code="c">EB</subfield>
    <subfield code="d">2023-12-13</subfield>
    <subfield code="l">0</subfield>
    <subfield code="o">629.8</subfield>
    <subfield code="p">SMME-TH-857</subfield>
    <subfield code="r">2023-12-13</subfield>
    <subfield code="w">2023-12-13</subfield>
    <subfield code="y">THE</subfield>
  </datafield>
</record>
