Publications
2014
Ioannis Iossifidis
Simulated Framework for the Development and Evaluation of Redundant Robotic Systems Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
Abstract | BibTeX | Tags: Autonomous robotics, man machine interaction, simulated reality
@inproceedings{Iossifidis2014a,
  title     = {Simulated Framework for the Development and Evaluation of Redundant Robotic Systems},
  author    = {Iossifidis, Ioannis},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  abstract  = {In the current work we present a simulated environment for the development and evaluation of multi redundant open chain manipulators. The framework is implemented in Matlab and provides solutions for the kinematics and dynamics of an arbitrary open chain manipulator. For a anthropomorphic trunk-shoulder-arm configuration with in total nine degree of freedoms, a closed form solution of the inverse kinematics problem is derived. The attractor dynamics approach to motion generation was evaluated within this framework and the results are verified on the real anthropomorphic robotic assistant Cora.},
  keywords  = {Autonomous robotics, man machine interaction, simulated reality},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014b,
  author    = {Iossifidis, Ioannis},
  title     = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  year      = {2014},
  date      = {2014-01-01},
  abstract  = {In the context of the increasing number of collaborative workplaces in industrial environments, where humans and robots sharing the same workplace, safety and intuitive interaction is a prerequisite. This means, that the robot can (1) have contact with his own body and the surrounding objects, (2) the motion of the robot can be corrected online by the human user just by touching his artificial skin or (3) interrupt the action in dangerous situations. In the current work we introduce a haptic interface (artificial skin) which is utilized to cover the arms of an anthropomorphic robotic assistant. The touched induced input of the artificial skin is interpreted and fed into the motor control algorithm to generate the desired motion and to avoid harm for human and machine.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014c,
  title     = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  author    = {Iossifidis, Ioannis},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Simulated Framework for the Development and Evaluation of Redundant Robotic Systems Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
BibTeX | Tags: Autonomous robotics, man machine interaction, simulated reality
@inproceedings{Iossifidis2014ab,
  title     = {Simulated Framework for the Development and Evaluation of Redundant Robotic Systems},
  author    = {Iossifidis, Ioannis},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  keywords  = {Autonomous robotics, man machine interaction, simulated reality},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Development of a Haptic Interface for Safe Human Robot Collaboration Inproceedings
In: International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014, 2014.
BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2014e,
  title     = {Development of a Haptic Interface for Safe Human Robot Collaboration},
  author    = {Iossifidis, Ioannis},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {International Conference on Pervasive and Embedded and Communication Systems, 2014, PECCS2014},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2013
I Iossifidis
Utilizing artificial skin for direct physical interaction Inproceedings
In: 2013 IEEE International Conference on Robotics and Biomimetics, ROBIO 2013, 2013.
Abstract | Links | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013c,
  title     = {Utilizing artificial skin for direct physical interaction},
  author    = {Iossifidis, I},
  doi       = {10.1109/ROBIO.2013.6739562},
  year      = {2013},
  date      = {2013-01-01},
  booktitle = {2013 IEEE International Conference on Robotics and Biomimetics, ROBIO 2013},
  abstract  = {Focusing on the development of flexible robots for industrial and household environments, we identify intuitive teaching as the key feature and direct physical interaction and guidance as the most important interface. In the current work we introduce a multi redundant robotic assistant equipped with a touch sensitive skin around the upper- and the forearm, in order to incorporate contact forces into the arm control. A context-sensitive interpretation of the contact forces is being used to guide the attention of the robot, to avoid obstacles and to move the robot arm directly by the human operator. \textcopyright~2013 IEEE.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Utilizing Artificial Skin for Direct Physical Interaction Inproceedings
In: Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013), 2013.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013d,
  author    = {Iossifidis, Ioannis},
  title     = {Utilizing Artificial Skin for Direct Physical Interaction},
  booktitle = {Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013)},
  year      = {2013},
  date      = {2013-01-01},
  abstract  = {Autonomous robots with limited computational capacity call for control approaches that generate meaningful, goal-directed behavior without using a large amount of resources. The attractor dynamics approach to movement generation is a framework that links sensor data to motor commands via coupled dynamical systems that have attractors at behaviorally desired states. The low computational demands leave enough system resources for higher level function like forming a sequence of local goals to reach a distant one. The comparatively high performance of local behavior generation allows the global planning to be relatively simple. In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis
Utilizing Artificial Skin for Direct Physical Interaction Inproceedings
In: Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013), 2013.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2013f,
  author    = {Iossifidis, Ioannis},
  title     = {Utilizing Artificial Skin for Direct Physical Interaction},
  booktitle = {Proc. IEEE/RSJ International Conference on Robotics and Biomimetics (RoBio2013)},
  year      = {2013},
  date      = {2013-01-01},
  abstract  = {Autonomous robots with limited computational capacity call for control approaches that generate meaningful, goal-directed behavior without using a large amount of resources. The attractor dynamics approach to movement generation is a framework that links sensor data to motor commands via coupled dynamical systems that have attractors at behaviorally desired states. The low computational demands leave enough system resources for higher level function like forming a sequence of local goals to reach a distant one. The comparatively high performance of local behavior generation allows the global planning to be relatively simple.
In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
In the present paper, we apply this approach to generate walking trajectories for a small humanoid robot, the Aldebaran Nao, that are goal-directed and avoid obstacles. The sensor information is a single camera in the head of the robot. The limited field of vision is compensated by head movements. The design of the dynamical system for motion generation and the choice of state variable makes a computationally expensive scene representation or local map building unnecessary.
2010
Yulia Sandamirskaya; John Lipinski; Ioannis Iossifidis; G Schöner
Natural human-robot interaction through spatial language: a dynamic neural fields approach Inproceedings
In: Proc. 19th IEEE International Workshop on Robot and Human Interactive Communication (ROMAN 2010), pp. 600–607, IEEE, 2010, ISSN: 1944-9445.
Links | BibTeX | Tags: Autonomous robotics, behavior generation, dynamical systems, man machine interaction, movement model, speech recognition
@inproceedings{Sandamirskayasubmitted,
  author    = {Sandamirskaya, Yulia and Lipinski, John and Iossifidis, Ioannis and Schöner, G},
  title     = {Natural human-robot interaction through spatial language: a dynamic neural fields approach},
  booktitle = {Proc. 19th IEEE International Workshop on Robot and Human Interactive Communication (ROMAN 2010)},
  pages     = {600--607},
  publisher = {IEEE},
  issn      = {1944-9445},
  url       = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=5598671},
  year      = {2010},
  date      = {2010-01-01},
  keywords  = {Autonomous robotics, behavior generation, dynamical systems, man machine interaction, movement model, speech recognition},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Stephan K U Zibner; Christian Faubel; Ioannis Iossifidis; Gregor Schöner
Scene Representation Based on Dynamic Field Theory: From Human to Machine Journal Article
In: Front. Comput. Neurosci. Conference Abstract: Bernstein Conference on Computational Neuroscience, 2010.
Links | BibTeX | Tags: dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition
@article{Zibner2010a,
  author    = {Zibner, Stephan K U and Faubel, Christian and Iossifidis, Ioannis and Schöner, Gregor},
  title     = {Scene Representation Based on Dynamic Field Theory: From Human to Machine},
  journal   = {Front. Comput. Neurosci. Conference Abstract: Bernstein Conference on Computational Neuroscience},
  doi       = {10.3389/conf.fncom.2010.51.00019},
  year      = {2010},
  date      = {2010-01-01},
  keywords  = {dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition},
  pubstate  = {published},
  tppubtype = {article}
}
Stephan S K U Zibner; Christian Faubel; Ioannis Iossifidis; Gregor Schöner
Scene Representation for Anthropomorphic Robots: A Dynamic Neural Field Approach Inproceedings
In: ISR / ROBOTIK 2010, VDE VERLAG GmbH, Munich, Germany, 2010.
Abstract | Links | BibTeX | Tags: Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition
@inproceedings{Zibner2010ab,
  title     = {Scene Representation for Anthropomorphic Robots: A Dynamic Neural Field Approach},
  author    = {Zibner, Stephan S K U and Faubel, Christian and Iossifidis, Ioannis and Schöner, Gregor},
  url       = {http://www.vde-verlag.de/proceedings-en/453273138.html},
  year      = {2010},
  date      = {2010-01-01},
  booktitle = {ISR / ROBOTIK 2010},
  publisher = {VDE VERLAG GmbH},
  address   = {Munich, Germany},
  abstract  = {An internal representation of a scene is essential to generate actions on scene objects. A stabilized storage of object location and features offers the flexibility to process queries phrased in human-based terms relating to objects, which may not be in the current camera view. Scene representation is therefore an internal representation of the surrounding world that is stabilized against head and body movement. It contains associated information about location and features of objects. Because objects and bodies move, scene representation is not a one-time process, but a constantly scene-adapting mechanism of scanning for, storing, updating, and deleting information.
Our novel architecture incorporates the generation of autonomous scanning sequences on real-time camera images. The head can then be oriented towards a selected object and the color feature can be extracted. Object location and feature information are associatively stored in a three-dimensional Dynamic Neural Field. Changes in the scene, even for multiple objects, can be tracked simultaneously. The stored information is used to generate behavior for cued recall. Cues can be table regions, features, or object labels. The robot demonstrates a successful recall by centering its gaze on the stated object.},
  keywords  = {Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Our novel architecture incorporates the generation of autonomous scanning sequences on real-time camera images. The head can then be oriented towards a selected object and the color feature can be extracted. Object location and feature information are associatively stored in a three-dimensional Dynamic Neural Field. Changes in the scene, even for multiple objects, can be tracked simultaneously. The stored information is used to generate behavior for cued recall. Cues can be table regions, features, or object labels. The robot demonstrates a successful recall by centering its gaze on the stated object.
Stephan K U Zibner; Christian Faubel; John P Spencer; Ioannis Iossifidis; Gregor Schöner
Scenes and Tracking with Dynamic Neural Fields: How to Update a Robotic Scene Representation Inproceedings
In: Proc. Int. Conf. on Development and Learning (ICDL10), 2010.
BibTeX | Tags: Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition
@inproceedings{Zibner2010c,
  author    = {Zibner, Stephan K U and Faubel, Christian and Spencer, John P and Iossifidis, Ioannis and Schöner, Gregor},
  title     = {Scenes and Tracking with Dynamic Neural Fields: How to Update a Robotic Scene Representation},
  booktitle = {Proc. Int. Conf. on Development and Learning (ICDL10)},
  year      = {2010},
  date      = {2010-01-01},
  keywords  = {Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Stephan K U Zibner; Christian Faubel; Ioannis Iossifidis; Gregor Schöner
Scene Representation with Dynamic Neural Fields: An Example of Complex Cognitive Architectures Based on Dynamic Neural Field Theory Inproceedings
In: Proc. Int. Conf. on Development and Learning (ICDL10), 2010.
BibTeX | Tags: Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition
@inproceedings{Zibnersubmittedb,
  author    = {Zibner, Stephan K U and Faubel, Christian and Iossifidis, Ioannis and Schöner, Gregor},
  title     = {Scene Representation with Dynamic Neural Fields: An Example of Complex Cognitive Architectures Based on Dynamic Neural Field Theory},
  booktitle = {Proc. Int. Conf. on Development and Learning (ICDL10)},
  year      = {2010},
  date      = {2010-01-01},
  keywords  = {Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Stephan Zibner; Christian Faubel; Ioannis Iossifidis; Gregor Schöner; John P Spencer
Scene and Tracking with Dynamic Neural Field Approach Inproceedings
In: ISR / ROBOTIK 2010, Munich, Germany, 2010.
Abstract | BibTeX | Tags: Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition
@inproceedings{Zibneri,
  title     = {Scene and Tracking with Dynamic Neural Field Approach},
  author    = {Zibner, Stephan and Faubel, Christian and Iossifidis, Ioannis and Schöner, Gregor and Spencer, John P},
  year      = {2010},
  date      = {2010-01-01},
  booktitle = {ISR / ROBOTIK 2010},
  address   = {Munich, Germany},
  abstract  = {An internal representation of a scene is essential to generate actions on scene objects. A stabilized storage of object location and features offers the flexibility to process queries phrased in human-based terms relating to objects, which may not be in the current camera view. Scene representation is therefore an internal representation of the surrounding world that is stabilized against head and body movement. It contains associated information about location and features of objects. Because objects and bodies move, scene representation is not a one-time process, but a constantly scene-adapting mechanism of scanning for, storing, updating, and deleting information.
Our novel architecture incorporates the generation of autonomous scanning sequences on real-time camera images. The head can then be oriented towards a selected object and the color feature can be extracted. Object location and feature information are associatively stored in a three-dimensional Dynamic Neural Field. Changes in the scene, even for multiple objects, can be tracked simultaneously. The stored information is used to generate behavior for cued recall. Cues can be table regions, features, or object labels. The robot demonstrates a successful recall by centering its gaze on the stated object.},
  keywords  = {Autonomous robotics, dynamic neural field, dynamical systems, man machine interaction, scene representation, speech recognition},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Our novel architecture incorporates the generation of autonomous scanning sequences on real-time camera images. The head can then be oriented towards a selected object and the color feature can be extracted. Object location and feature information are associatively stored in a three-dimensional Dynamic Neural Field. Changes in the scene, even for multiple objects, can be tracked simultaneously. The stored information is used to generate behavior for cued recall. Cues can be table regions, features, or object labels. The robot demonstrates a successful recall by centering its gaze on the stated object.
2005
I Iossifidis; C Bruckhoff; C Theis; C Grote; C Faubel; G Schöner
A cooperative robotic assistant for human environments Book
2005, ISSN: 16107438.
Abstract | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@book{Iossifidis2005a,
  title     = {A cooperative robotic assistant for human environments},
  author    = {Iossifidis, I and Bruckhoff, C and Theis, C and Grote, C and Faubel, C and Schöner, G},
  issn      = {1610-7438},
  year      = {2005},
  date      = {2005-01-01},
  series    = {Springer Tracts in Advanced Robotics},
  volume    = {14},
  publisher = {Springer},
  abstract  = {CoRA is a robotic assistant whose task is to collaborate with a human operator on simple manipulation or handling tasks. Its sensory channels comprising vision, audition, haptics, and force sensing are used to extract perceptual information about speech, gestures and gaze of the operator, and object recognition. The anthropomorphic robot arm makes goal-directed movements to pick up and hand-over objects. The human operator may mechanically interact with the arm by pushing it away (haptics) or by taking an object out of the robot's gripper (force sensing). The design objective has been to exploit the human operator's intuition by modeling the mechanical structure, the senses, and the behaviors of the assistant on human anatomy, human perception, and human motor behavior. \textcopyright~Springer-Verlag Berlin Heidelberg 2005.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {book}
}
I Iossifidis; G Lawitzky; S Knoop; R Zöllner
Towards benchmarking of domestic robotic assistants Book
2005, ISSN: 16107438.
Abstract | BibTeX | Tags: Autonomous robotics, benchmarking, human robot collaboration, man machine interaction
@book{Iossifidis2005b,
  title     = {Towards benchmarking of domestic robotic assistants},
  author    = {Iossifidis, I and Lawitzky, G and Knoop, S and Zöllner, R},
  issn      = {1610-7438},
  year      = {2005},
  date      = {2005-01-01},
  series    = {Springer Tracts in Advanced Robotics},
  volume    = {14},
  publisher = {Springer},
  abstract  = {As service robotics research advances rapidly, availability of objective, reproducible test specifications and evaluation criteria and also of benchmarking is more and more felt to be desirable in the community. As a first step towards benchmarking, in this paper we propose a formalization of tests - exemplified for domestic grasp \& place tasks. The underlying philosophy of our approach is to confront the robot system in a black-box manner with requirements of a "rational customer", and characterize the performance of the system in an objective way by the outcomes of a test-suite tailored to this scenario. A formalized single test description consists of a clear and reproducible specification of the robot's task and the full context on the one hand, and a number of figures which objectively characterize the test result on the other hand. We illustrate this methodology for the domestic assistance scenario. \textcopyright~Springer-Verlag Berlin Heidelberg 2005.},
  keywords  = {Autonomous robotics, benchmarking, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {book}
}
2004
Ioannis Iossifidis; Gregor Schöner; Gregor Schoner
Autonomous Reaching and Obstacle Avoidance with the Anthropomorphic Arm of a Robotic Assistant Using the Attractor Dynamics Approach Inproceedings
In: Proc. IEEE International Conference on Robotics and Automation ICRA '04, pp. 4295–4300, Vol. 5, 2004, ISSN: 1050-4729.
Abstract | Links | BibTeX | Tags: autonomous reaching, collision avoidance, end effector shift, end effectors, man machine interaction, manipulator dynamics, obstacle avoidance, robotic assistant, time varying environment, time-varying systems
@inproceedings{Iossifidis2004b,
  title     = {Autonomous reaching and obstacle avoidance with the anthropomorphic arm of a robotic assistant using the attractor dynamics approach},
  author    = {Iossifidis, Ioannis and Schöner, Gregor},
  doi       = {10.1109/ROBOT.2004.1302393},
  issn      = {1050-4729},
  year      = {2004},
  date      = {2004-01-01},
  booktitle = {Proc. IEEE International Conference on Robotics and Automation ICRA '04},
  volume    = {5},
  pages     = {4295--4300},
  abstract  = {To enable a robotic assistant to autonomously reach for and transport objects while avoiding obstacles we have generalized the attractor dynamics approach established for vehicles to trajectory formation in robot arms. This approach is able to deal with the time-varying environments that occur when a human operator moves in a shared workspace. Stable fixed points (attractors) for the heading direction of the end-effector shift during movement and are being tracked by the system. This enables the attractor dynamics approach to avoid the spurious states that hamper potential field methods. Separating planning and control computationally, the approach is also simpler to implement. The stability properties of the movement plan make it possible to deal with fluctuating and imprecise sensory information. We implement this approach on a seven degree of freedom anthropomorphic arm reaching for objects on a working surface. We use an exact solution of the inverse kinematics, which enables us to steer the spatial position of the elbow clear of obstacles. The straight-line trajectories of the end-effector that emerge as long as the arm is far from obstacles make the movement goals of the robotic assistant predictable for the human operator, improving man-machine interaction.},
  keywords  = {autonomous reaching, collision avoidance, end effector shift, end effectors, man machine interaction, manipulator dynamics, obstacle avoidance, robotic assistant, time varying environment, time-varying systems},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ioannis Iossifidis; Gisbert Lawitzky; Stephan Knoop; Raoul Zöllner
Towards Benchmarking of Domestic Robotic Assistants Incollection
In: Prassler, Erwin; Lawitzky, Gisbert; Stopp, Andreas; Grunwald, Gerhard; Hägele, Martin; Dillmann, Rüdiger; Iossifidis, Ioannis (Ed.): Advances in Human Robot Interaction, vol. 14/2004, ISBN: 3-540-23211-7, pp. 403–414, Springer Press, 2004.
Abstract | Links | BibTeX | Tags: Autonomous robotics, benchmarking, human robot collaboration, man machine interaction
@incollection{Iossifidis2004c,
  title     = {Towards Benchmarking of Domestic Robotic Assistants},
  author    = {Iossifidis, Ioannis and Lawitzky, Gisbert and Knoop, Stephan and Zöllner, Raoul},
  editor    = {Erwin Prassler and Gisbert Lawitzky and Andreas Stopp and Gerhard Grunwald and Martin Hägele and Rüdiger Dillmann and Ioannis Iossifidis},
  url       = {http://www.springerlink.com/index/AB4F63B9DADFE299},
  year      = {2004},
  date      = {2004-01-01},
  booktitle = {Advances in Human Robot Interaction},
  volume    = {14/2004},
  isbn      = {3-540-23211-7},
  pages     = {403--414},
  publisher = {Springer Press},
  chapter   = {7},
  series    = {Springer Tracts in Advanced Robotics STAR},
  abstract  = {As service robotics research advances rapidly, availability of objective, reproducible test specifications and evaluation criteria and also of benchmarking is more and more felt to be desirable in the community. As a first step towards benchmarking, in this paper we propose a formalization of tests - exemplified for domestic grasp \& place tasks. The underlying philosophy of our approach is to confront the robot system in a black-box manner with requirements of a ``rational customer'', and characterize the performance of the system in an objective way by the outcomes of a test-suite tailored to this scenario. A formalized single test description consists of a clear and reproducible specification of the robot's task and the full context on the one hand, and a number of figures which objectively characterize the test result on the other hand. We illustrate this methodology for the domestic assistance scenario.},
  keywords  = {Autonomous robotics, benchmarking, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {incollection}
}
2002
I Iossifidis; C Bruckhoff; C Theis; C Grote; C Faubel; G Schöner
CORA: An anthropomorphic robot assistant for human environment Inproceedings
In: Proceedings. 11th IEEE International Workshop on Robot and Human Interactive Communication, pp. 392–398, IEEE, 2002, ISBN: 0-7803-7545-9.
Abstract | Links | BibTeX | Tags: Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction
@inproceedings{Iossifidis2002ab,
  author    = {Iossifidis, I and Bruckhoff, C and Theis, C and Grote, C and Faubel, C and Schöner, G},
  title     = {CORA: An anthropomorphic robot assistant for human environment},
  booktitle = {Proceedings. 11th IEEE International Workshop on Robot and Human Interactive Communication},
  pages     = {392--398},
  publisher = {IEEE},
  doi       = {10.1109/ROMAN.2002.1045654},
  isbn      = {0-7803-7545-9},
  year      = {2002},
  date      = {2002-01-01},
  abstract  = {We describe the general concept, system architecture, hardware, and the behavioral abilities of CORA (Cooperative Robot Assistant), an autonomous nonmobile robot assistant. Outgoing from our basic assumption that the behavior to perform determines the internal and external structure of the behaving system, we have designed CORA anthropomorphic to allow for humanlike behavioral strategies in solving complex tasks. Although CORA was built as a prototype of a service robot system to assist a human partner in industrial assembly tasks, we will show that CORA's behavioral abilities are also conferrable in a household environment. After the description of the hardware platform and the basic concepts of our approach, we present some experimental results by means of an assembly task.},
  keywords  = {Autonomous robotics, direct physical interaction, haptic interface, human robot collaboration, man machine interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}