Publications
2014
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014b,
  title     = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  author    = {Ioannis Iossifidis},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  year      = {2014},
  date      = {2014-01-01},
  abstract  = {In the context of the increasing number of collaborative workplaces in industrial environments, where humans and robots sharing the same workplace, safety and intuitive interaction is a prerequisite. This means, that the robot can (1) have contact with his own body and the surrounding objects, (2) the motion of the robot can be corrected online by the human user just by touching his artificial skin or (3) interrupt the action in dangerous situations. In the current work we introduce a haptic interface (artificial skin) which is utilized to cover the arms of an anthropomorphic robotic assistant. The touched induced input of the artificial skin is interpreted and fed into the motor control algorithm to generate the desired motion and to avoid harm for human and machine.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014c,
  title         = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  author        = {Iossifidis, Ioannis},
  booktitle     = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  year          = {2014},
  date          = {2014-01-01},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {inproceedings},
  internal-note = {booktitle year corrected from 2012 to 2014 to match the year/date fields and entry Iossifidis2014b; this entry looks like a duplicate of Iossifidis2014b -- verify and consolidate},
}
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014e,
  title         = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  author        = {Iossifidis, Ioannis},
  booktitle     = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  year          = {2014},
  date          = {2014-01-01},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {inproceedings},
  internal-note = {booktitle year corrected from 2012 to 2014 to match the year/date fields and entry Iossifidis2014b; this entry looks like a duplicate of Iossifidis2014b/Iossifidis2014c -- verify and consolidate},
}
2013
I Iossifidis
Utilizing artificial skin for direct physical interaction Inproceedings
In: 2013 IEEE International Conference on Robotics and Biomimetics, ROBIO 2013, 2013.
Abstract | Links | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013c,
  title         = {Utilizing artificial skin for direct physical interaction},
  author        = {Iossifidis, I.},
  booktitle     = {2013 IEEE International Conference on Robotics and Biomimetics, ROBIO 2013},
  doi           = {10.1109/ROBIO.2013.6739562},
  year          = {2013},
  date          = {2013-01-01},
  abstract      = {Focusing on the development of flexible robots for industrial and household environments, we identify intuitive teaching as the key feature and direct physical interaction and guidance as the most important interface. In the current work we introduce a multi redundant robotic assistant equipped with a touch sensitive skin around the upper- and the forearm, in order to incorporate contact forces into the arm control. A context-sensitive interpretation of the contact forces is being used to guide the attention of the robot, to avoid obstacles and to move the robot arm directly by the human operator. {\textcopyright} 2013 IEEE.},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {inproceedings},
  internal-note = {repaired mangled escape: "textcopyright" was missing its backslash; author normalised to comma form},
}
Ioannis Iossifidis
Utilizing Artificial Skin for Direct Physical Interaction Inproceedings
In: Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013), 2013.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013d,
  title         = {Utilizing Artificial Skin for Direct Physical Interaction},
  author        = {Iossifidis, Ioannis},
  booktitle     = {Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013)},
  year          = {2013},
  date          = {2013-01-01},
  abstract      = {Autonomous robots with limited computational capacity call for control approaches that generate meaningful, goal-directed behavior without using a large amount of resources. The attractor dynamics approach to movement generation is a framework that links sensor data to motor commands via coupled dynamical systems that have attractors at behaviorally desired states. The low computational demands leave enough system resources for higher level function like forming a sequence of local goals to reach a distant one. The comparatively high performance of local behavior generation allows the global planning to be relatively simple. In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {inproceedings},
  internal-note = {NOTE(review): abstract describes Nao walking-trajectory generation, not the artificial-skin topic of the title -- likely pasted from a different paper, verify against the published version; entry also looks like a duplicate of Iossifidis2013c (same work, different venue spelling) -- consolidate},
}
Ioannis Iossifidis
Utilizing Artificial Skin for Direct Physical Interaction Inproceedings
In: Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013), 2013.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013f,
  title         = {Utilizing Artificial Skin for Direct Physical Interaction},
  author        = {Iossifidis, Ioannis},
  booktitle     = {Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013)},
  year          = {2013},
  date          = {2013-01-01},
  abstract      = {Autonomous robots with limited computational capacity call for control approaches that generate meaningful, goal-directed behavior without using a large amount of resources. The attractor dynamics approach to movement generation is a framework that links sensor data to motor commands via coupled dynamical systems that have attractors at behaviorally desired states. The low computational demands leave enough system resources for higher level function like forming a sequence of local goals to reach a distant one. The comparatively high performance of local behavior generation allows the global planning to be relatively simple. In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {inproceedings},
  internal-note = {NOTE(review): abstract describes Nao walking-trajectory generation, not the artificial-skin topic of the title -- likely pasted from a different paper, verify; entry is a byte-near duplicate of Iossifidis2013d -- consolidate},
}
@comment{Extraction residue -- duplicated abstract paragraph, already contained in entry Iossifidis2013f: In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.}
2005
I Iossifidis; C Bruckhoff; C Theis; C Grote; C Faubel; G Schöner
A cooperative robotic assistant for human environments Book
2005, ISSN: 16107438.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@book{Iossifidis2005a,
  title         = {A cooperative robotic assistant for human environments},
  author        = {Iossifidis, I. and Bruckhoff, C. and Theis, C. and Grote, C. and Faubel, C. and Schöner, G.},
  series        = {Springer Tracts in Advanced Robotics},
  volume        = {14},
  issn          = {1610-7438},
  year          = {2005},
  date          = {2005-01-01},
  abstract      = {CoRA is a robotic assistant whose task is to collaborate with a human operator on simple manipulation or handling tasks. Its sensory channels comprising vision, audition, haptics, and force sensing are used to extract perceptual information about speech, gestures and gaze of the operator, and object recognition. The anthropomorphic robot arm makes goal-directed movements to pick up and hand-over objects. The human operator may mechanically interact with the arm by pushing it away (haptics) or by taking an object out of the robot's gripper (force sensing). The design objective has been to exploit the human operator's intuition by modeling the mechanical structure, the senses, and the behaviors of the assistant on human anatomy, human perception, and human motor behavior. {\textcopyright} Springer-Verlag Berlin Heidelberg 2005.},
  keywords      = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate      = {published},
  tppubtype     = {book},
  internal-note = {booktitle moved to series (booktitle is ignored in @book; Springer Tracts in Advanced Robotics is the series, volume 14); ISSN hyphenated; mangled "textcopyright" escape repaired; authors normalised to comma form},
}
2002
I Iossifidis; A Steinhage
Controlling a redundant robot arm by means of a haptic sensor Book
2002, ISSN: 00835560.
Abstract | BibTeX | Tags: Artificial skin, direct physical interaction, haptic interface, Inverse kinematics, Man-machine-interaction, Robot manipulator control
@book{Iossifidis2002d,
  title         = {Controlling a redundant robot arm by means of a haptic sensor},
  author        = {Iossifidis, I. and Steinhage, A.},
  series        = {VDI Berichte},
  number        = {1679},
  issn          = {0083-5560},
  year          = {2002},
  date          = {2002-01-01},
  abstract      = {This paper describes the hardware- and software-implementation of a touch-sensitive device on the manipulator arm of our anthropomorphic robot CORA. This so-called artificial skin is used to control the configuration of the manipulator while the robot is grasping for objects. By exploiting redundant degrees of freedom, this operator-induced movement constraint can be accounted for without changing the configuration of the end-effector.},
  keywords      = {Artificial skin, direct physical interaction, haptic interface, Inverse kinematics, Man-machine-interaction, Robot manipulator control},
  pubstate      = {published},
  tppubtype     = {book},
  internal-note = {booktitle moved to series (booktitle is ignored in @book; VDI Berichte is the report series, number 1679); ISSN hyphenated; authors normalised to comma form},
}
I Iossifidis; C Bruckhoff; C Theis; C Grote; C Faubel; G Schöner
CORA: An anthropomorphic robot assistant for human environment Inproceedings
In: Proceedings. 11th IEEE International Workshop on Robot and Human Interactive Communication, pp. 392–398, IEEE, 2002, ISBN: 0-7803-7545-9.
Abstract | Links | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2002ab,
  title     = {CORA: An anthropomorphic robot assistant for human environment},
  author    = {I Iossifidis and C Bruckhoff and C Theis and C Grote and C Faubel and G Schöner},
  booktitle = {Proceedings. 11th IEEE International Workshop on Robot and Human Interactive Communication},
  publisher = {IEEE},
  pages     = {392--398},
  doi       = {10.1109/ROMAN.2002.1045654},
  isbn      = {0-7803-7545-9},
  year      = {2002},
  date      = {2002-01-01},
  abstract  = {We describe the general concept, system architecture, hardware, and the behavioral abilities of CORA (Cooperative Robot Assistant), an autonomous nonmobile robot assistant. Outgoing from our basic assumption that the behavior to perform determines the internal and external structure of the behaving system, we have designed CORA anthropomorphic to allow for humanlike behavioral strategies in solving complex tasks. Although CORA was built as a prototype of a service robot system to assist a human partner in industrial assembly tasks, we will show that CORA's behavioral abilities are also conferrable in a household environment. After the description of the hardware platform and the basic concepts of our approach, we present some experimental results by means of an assembly task.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings},
}