Publications
2001
Christoph Theis; Ioannis Iossifidis; Axel Steinhage
Image processing methods for interactive robot control Inproceedings
In: Proc. 10th IEEE International Workshop on Robot and Human Interactive Communication, pp. 424–429, 2001.
Abstract | Links | BibTeX | Tags: active stereo camera system, human hand tracking, human pointing gesture, image processing, interactive robot control, multi-modal man-machine interaction system
@inproceedings{Theis2001,
  title     = {Image processing methods for interactive robot control},
  author    = {Theis, Christoph and Iossifidis, Ioannis and Steinhage, Axel},
  doi       = {10.1109/ROMAN.2001.981941},
  year      = {2001},
  date      = {2001-01-01},
  urldate   = {2001-01-01},
  booktitle = {Proc. 10th {IEEE} International Workshop on Robot and Human Interactive Communication},
  pages     = {424--429},
  abstract  = {In this paper we describe a straight forward technique for tracking a human hand based on images acquired by an active stereo camera system. We demonstrate the implementation of this method on an anthropomorphic assistance robot as part of a multi-modal man-machine interaction system: detecting the hand-position, the robot can interprete a human pointing gesture as the specification of a target object to grasp},
  keywords  = {active stereo camera system, human hand tracking, human pointing gesture, image processing, interactive robot control, multi-modal man-machine interaction system},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Werner Seelen; Ioannis Iossifidis; Axel Steinhage
Visually guided behavior of an autonomous robot with a neuronal architecture Inproceedings
In: 2001 IEEE International Symposium on Computational Intelligence in Robotics and Automation, CIRA 2001, IEEE, Banff, Canada, 2001.
Abstract | BibTeX | Tags: active stereo camera system, Autonomous robotics, human hand tracking, human pointing gesture, image processing, interactive robot control, multi-modal man-machine interaction system
@inproceedings{Seelen2001,
  title         = {Visually guided behavior of an autonomous robot with a neuronal architecture},
  author        = {Seelen, Werner and Iossifidis, Ioannis and Steinhage, Axel},
  internal-note = {NOTE(review): first author is plausibly "von Seelen, Werner" (particle surname); source shows no "von" -- verify against the published paper before changing},
  year          = {2001},
  date          = {2001-01-01},
  booktitle     = {2001 {IEEE} International Symposium on Computational Intelligence in Robotics and Automation, {CIRA} 2001},
  address       = {Banff, Canada},
  organization  = {IEEE},
  series        = {{CIRA} 2001, Workshop Vision-Based Object Recognition in Robotics},
  abstract      = {We constructed two Robot Systems. Both have a "neuronal architecture". The first (ARNOLD) is able to explore visually an unknown environement, to navigate in this environment and to use his 7DOF-arm to grasp and transport objects. The system can be guided by gestures and a limited set of spoken commands. The second system (CORA) is stationary and shall cooperate with a human at a production line in an interactive assembly process. Our contribution is focussed on to the vision problems. In both cases we use a 2DOF stereo camera system. The visual navigation is based on "place fields" obtained by correlating the current view with stored views captured at strategic points. This can be combined with a trajectory finding on the basis of nonlinear dynamics. Obstacles are avoided by repellors in the trajectory-equation and by inverse perspective mapping. Position and form of objects are evaluated in the sense of finding an appropriate grasping configuration for selected objects. The scene analysis in the CORA-system presupposes the estimation of the view-direction of the human partner. Than a limited set of objects can be detected and tracked if this is necessary (Hausdorff distance). The actual analysis of the entire scene relies on the relation of the detected objects to eachother within the environement, on the task to be fulfilled and on the step that is reached within the entire task. The different necessary estimations and detections within the sequences are coded in terms of Neural fields. In this way the visual perception, the interactive communication and the visually guided behaviour is realised in the same formate.},
  keywords      = {active stereo camera system, Autonomous robotics, human hand tracking, human pointing gesture, image processing, interactive robot control, multi-modal man-machine interaction system},
  pubstate      = {published},
  tppubtype     = {inproceedings}
}